Upload app.py
app.py
CHANGED
@@ -24,7 +24,7 @@ def LoadData(openai_key):
 
     persist_directory = 'realdb_LLM'
 
-    embedding = OpenAIEmbeddings()
+    embedding = OpenAIEmbeddings(openai_key)
 
     vectordb = Chroma(
         persist_directory=persist_directory,
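For context, here is a minimal sketch of the vector-store setup this hunk touches once the user-supplied key is threaded through. The imports, the hypothetical helper name load_vectordb, and the keyword form openai_api_key= are assumptions based on common langchain usage, not part of the commit.

# Sketch (assumption): load the persisted Chroma store with embeddings bound to
# the caller's OpenAI key, mirroring the lines changed in this hunk.
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma

def load_vectordb(openai_key):
    persist_directory = 'realdb_LLM'
    # The new line passes the key explicitly; the keyword form is an assumption.
    embedding = OpenAIEmbeddings(openai_api_key=openai_key)
    vectordb = Chroma(
        persist_directory=persist_directory,
        embedding_function=embedding,
    )
    return vectordb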
@@ -38,15 +38,9 @@ def LoadData(openai_key):
     else:
         return "Please enter the API Key you are using."
 
-
-    print(llm_response['result'])
-    print('\n\nSources:')
-    for source in llm_response["source_documents"]:
-        print(source.metadata['source'])
-
-
+
 # Function that handles the chatbot's response
-def respond(message, chat_history, temperature):
+def respond(message, chat_history):
     try:
         qa_chain = RetrievalQA.from_chain_type(
             llm=OpenAI(temperature=0.4),
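The new signature drops the temperature argument; the hard-coded OpenAI(temperature=0.4) inside the chain covers it. A rough sketch of how such a respond handler typically wires RetrievalQA into a Gradio chat history follows; chain_type="stuff", the retriever wiring, and the except branch are assumptions, since the hunk cuts off before them.

# Sketch (assumptions: chain_type, retriever wiring, error handling).
from langchain.chains import RetrievalQA
from langchain.llms import OpenAI

def respond_sketch(message, chat_history, vectordb):
    try:
        qa_chain = RetrievalQA.from_chain_type(
            llm=OpenAI(temperature=0.4),        # temperature fixed, matching the diff
            chain_type="stuff",                 # assumption
            retriever=vectordb.as_retriever(),  # assumption
            return_source_documents=True,       # matches the removed source_documents prints
        )
        llm_response = qa_chain(message)
        chat_history.append((message, llm_response['result']))
    except Exception as e:
        chat_history.append((message, f"Error: {e}"))
    return " ", chat_history                    # same return shape as the committed code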
@@ -75,9 +69,6 @@ def respond(message, chat_history, temperature):
 
         return " ", chat_history
 
-    # return "", chat_history
-
-
 import gradio as gr
 
 # Chatbot description
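The import gradio as gr line and the "# Chatbot description" comment suggest the UI is assembled right after respond. A plausible wiring, consistent with respond returning (" ", chat_history) to clear the textbox and refresh the chat; the component names, layout, and description text are assumptions.

# Sketch (assumption): a Gradio Blocks UI pairing a Textbox with a Chatbot and
# routing submissions through respond(message, chat_history).
import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("RAG chatbot over the 'realdb_LLM' Chroma store")  # placeholder description
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Question")
    # respond returns (" ", chat_history): blanks the input box, updates the chat.
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])

demo.launch()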