Saurabh46 committed
Commit c8779cd · 1 Parent(s): 3107899

Update app.py

Files changed (1)
  app.py  +21 -30
app.py CHANGED
@@ -1,48 +1,39 @@
-from llama_index import GPTVectorStoreIndex, SimpleDirectoryReader, LLMPredictor, ServiceContext, StorageContext, load_index_from_storage
-from langchain import OpenAI
-import gradio
+from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
+from langchain.chat_models import ChatOpenAI
+import gradio as gr
+import sys
 import os

 os.environ["OPENAI_API_KEY"] = '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'

 def construct_index(directory_path):
-    # set number of output tokens
-    num_outputs = 256
+    max_input_size = 4096
+    num_outputs = 512
+    max_chunk_overlap = 20
+    chunk_size_limit = 600

-    _llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="gpt-4", max_tokens=num_outputs))
+    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

-    service_context = ServiceContext.from_defaults(llm_predictor=_llm_predictor)
+    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-4", max_tokens=num_outputs))

-    docs = SimpleDirectoryReader(directory_path).load_data()
+    documents = SimpleDirectoryReader(directory_path).load_data()

-    index = GPTVectorStoreIndex.from_documents(docs, service_context=service_context)
-
-    #Directory in which the indexes will be stored
-    index.storage_context.persist(persist_dir="indexes")
+    index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
+
+    index.save_to_disk('index.json')

     return index

 def chatbot(input_text):
-
-    # rebuild storage context
-    storage_context = StorageContext.from_defaults(persist_dir="indexes")
-
-    #load indexes from directory using storage_context
-    query_engne = load_index_from_storage(storage_context).as_query_engine()
-
-    response = query_engne.query(input_text)
-
-    #returning the response
+    index = GPTSimpleVectorIndex.load_from_disk('index.json')
+    response = index.query(input_text, response_mode="compact")
     return response.response

-#Creating the web UIusing gradio
-iface = gradio.Interface(fn=chatbot,
-                         inputs=gradio.inputs.Textbox(lines=4, label="Enter your question here"),
-                         outputs=gradio.outputs.Textbox(label="Generated Text"),
-                         title="Custom-trained AI Chatbot")
+iface = gr.Interface(fn=chatbot,
+                     inputs=gr.components.Textbox(lines=7, label="Enter your text"),
+                     outputs="text",
+                     title="My Custom-trained AI Chatbot")

-#Constructing indexes based on the documents in trainingData folder
 index = construct_index("trainingData")
-
-#launching the web UI using gradio
 iface.launch()
+
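For a quick check of the updated query path without launching the Gradio UI, the persisted index can be loaded and queried directly. This is a minimal sketch, assuming the same gpt_index version this Space pins, an index.json already written by construct_index("trainingData"), and an illustrative question string (not from the commit):

    from gpt_index import GPTSimpleVectorIndex

    # Load the vector index that construct_index() saved to disk
    index = GPTSimpleVectorIndex.load_from_disk('index.json')

    # Query it the same way chatbot() does; the question text is an example only
    response = index.query("What topics does the training data cover?", response_mode="compact")
    print(response.response)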