import { CodeHighlightAdapterProvider } from "@mantine/code-highlight";
import { usePubSub } from "create-pubsub/react";
import { useMemo } from "react";
import {
  modelLoadingProgressPubSub,
  modelSizeInMegabytesPubSub,
  queryPubSub,
  responsePubSub,
  settingsPubSub,
  textGenerationStatePubSub,
} from "../../modules/pubSub";
import { shikiAdapter } from "../../modules/shiki";
import "@mantine/code-highlight/styles.css";
import AiModelDownloadAllowanceContent from "./AiModelDownloadAllowanceContent";
import AiResponseContent from "./AiResponseContent";
import ChatInterface from "./ChatInterface";
import LoadingModelContent from "./LoadingModelContent";
import PreparingContent from "./PreparingContent";

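/**
 * Renders the AI response area according to the current text generation state:
 * the streamed or finished response (with a follow-up chat once completed),
 * model loading progress, preparation/search status, or the model download
 * allowance prompt. Renders nothing when AI responses are disabled or idle.
 */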
export default function AiResponseSection() {
  const [query] = usePubSub(queryPubSub);
  const [response] = usePubSub(responsePubSub);
  const [textGenerationState, setTextGenerationState] = usePubSub(
    textGenerationStatePubSub,
  );
  const [modelLoadingProgress] = usePubSub(modelLoadingProgressPubSub);
  const [settings] = usePubSub(settingsPubSub);
  const [modelSizeInMegabytes] = usePubSub(modelSizeInMegabytesPubSub);

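  // Memoize the rendered output so it is rebuilt only when the pubsub-backed
  // values it depends on change.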
  return useMemo(() => {
    if (!settings.enableAiResponse || textGenerationState === "idle") {
      return null;
    }

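    // States rendered by AiResponseContent: in-progress, interrupted,
    // completed, or failed generation.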
    const generatingStates = [
      "generating",
      "interrupted",
      "completed",
      "failed",
    ];
    if (generatingStates.includes(textGenerationState)) {
      return (
        <CodeHighlightAdapterProvider adapter={shikiAdapter}>
          <AiResponseContent
            textGenerationState={textGenerationState}
            response={response}
            setTextGenerationState={setTextGenerationState}
          />

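          {/* Once generation has completed, offer a follow-up chat seeded with the original query and response. */}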
          {textGenerationState === "completed" && (
            <ChatInterface initialQuery={query} initialResponse={response} />
          )}
        </CodeHighlightAdapterProvider>
      );
    }

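    // Model is being downloaded or initialized: show its progress and size.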
    if (textGenerationState === "loadingModel") {
      return (
        <LoadingModelContent
          modelLoadingProgress={modelLoadingProgress}
          modelSizeInMegabytes={modelSizeInMegabytes}
        />
      );
    }

    if (textGenerationState === "preparingToGenerate") {
      return <PreparingContent textGenerationState={textGenerationState} />;
    }

    if (textGenerationState === "awaitingSearchResults") {
      return <PreparingContent textGenerationState={textGenerationState} />;
    }

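    // Waiting for the user to allow the model download.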
    if (textGenerationState === "awaitingModelDownloadAllowance") {
      return <AiModelDownloadAllowanceContent />;
    }

    return null;
  }, [
    settings.enableAiResponse,
    textGenerationState,
    response,
    query,
    modelLoadingProgress,
    modelSizeInMegabytes,
    setTextGenerationState,
  ]);
}