Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -1,37 +1,47 @@
 from llama_index import GPTVectorStoreIndex, SimpleDirectoryReader, LLMPredictor, ServiceContext, StorageContext, load_index_from_storage
-from langchain
-import gradio
-import sys
+from langchain import OpenAI
+import gradio
 import os

 os.environ["OPENAI_API_KEY"] = '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'

 def construct_index(directory_path):
-
-    num_outputs = 512
-    max_chunk_overlap = 20
-    chunk_size_limit = 600
-
-
-
-    index.
+    num_outputs = 256
+
+    _llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="gpt-4", max_tokens=num_outputs))
+
+    service_context = ServiceContext.from_defaults(llm_predictor=_llm_predictor)
+
+    docs = SimpleDirectoryReader(directory_path).load_data()
+
+    index = GPTVectorStoreIndex.from_documents(docs, service_context=service_context)
+
+    # Directory in which the indexes will be stored
+    index.storage_context.persist(persist_dir="indexes")

     return index

 def chatbot(input_text):
-
-
+
+    # rebuild storage context
+    storage_context = StorageContext.from_defaults(persist_dir="indexes")
+
+    # load indexes from directory using storage_context
+    query_engine = load_index_from_storage(storage_context).as_query_engine()
+
+    response = query_engine.query(input_text)
+
+    # returning the response
     return response.response

-
-
-
-
+# Creating the web UI using gradio
+iface = gradio.Interface(fn=chatbot,
+                         inputs=gradio.inputs.Textbox(lines=4, label="Enter your question here"),
+                         outputs=gradio.outputs.Textbox(label="Generated Text"),
+                         title="Custom-trained AI Chatbot")

+# Constructing indexes based on the documents in the trainingData folder
 index = construct_index("trainingData")
-iface.launch()

+# launching the web UI using gradio
+iface.launch()
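For reference, a minimal sketch of exercising the persisted index from a plain Python session, without going through the Gradio UI. It assumes construct_index("trainingData") has already written the index to the indexes/ directory, that OPENAI_API_KEY is set in the environment, and that the same llama_index release app.py targets is installed; the question string is only a placeholder.

# Sketch: query the persisted index directly, bypassing the Gradio interface.
# Assumes indexes/ was populated by construct_index() and OPENAI_API_KEY is set.
from llama_index import StorageContext, load_index_from_storage

storage_context = StorageContext.from_defaults(persist_dir="indexes")
query_engine = load_index_from_storage(storage_context).as_query_engine()

# Placeholder question; replace with something covered by the trainingData documents.
print(query_engine.query("What topics does the training data cover?").response)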