AminFaraji committed on
Commit
7c79220
·
verified ·
1 Parent(s): cf7e13a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -153
app.py CHANGED
@@ -1,153 +0,0 @@
1
# Third-party imports.
# NOTE(review): the previous revision wrapped this import in try/except with
# a bare `except:` — but both branches imported the exact same name, so the
# fallback was dead code and the bare except could mask unrelated errors.
# A single plain import is equivalent.
from langchain_community.vectorstores import Chroma

from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory

from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
14
import os

# Initialize the Groq chat model (llama3-70b-8192) with temperature 0 for
# deterministic answers.
# SECURITY: the API key must come from the environment, never be hard-coded —
# the previous revision committed a live `gsk_...` key to source control;
# that key should be revoked and rotated.
llm = ChatGroq(
    temperature=0,
    model_name="llama3-70b-8192",
    api_key=os.environ.get("GROQ_API_KEY"),
)
16
-
17
from langchain_community.embeddings import SentenceTransformerEmbeddings

# Sentence-transformer embedding model; the same embedder is used both to
# open the persisted Chroma index and for query-time retrieval.
_EMBED_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
embeddings = SentenceTransformerEmbeddings(
    model_name=_EMBED_MODEL_NAME,
    model_kwargs={"trust_remote_code": True},
)
20
-
21
-
22
-
23
-
24
-
25
# Sliding-window conversation memory: only the last k=3 exchanges are
# retained and exposed to the prompt under the "history" variable.
memory = ConversationBufferWindowMemory(
    memory_key="history",
    k=3,
    return_only_outputs=True,
)
28
-
29
-
30
-
31
-
32
-
33
-
34
- query_text="what did alice say to rabbit"
35
-
36
- # Prepare the DB.
37
- #embedding_function = OpenAIEmbeddings() # main
38
-
39
- CHROMA_PATH = "chroma8"
40
- # call the chroma generated in a directory
41
- db = Chroma(persist_directory=CHROMA_PATH, embedding_function=embeddings)
42
-
43
- # Search the DB for similar documents to the query.
44
- results = db.similarity_search_with_relevance_scores(query_text, k=2)
45
- if len(results) == 0 or results[0][1] < 0.5:
46
- print(f"Unable to find matching results.")
47
-
48
-
49
-
50
-
51
-
52
-
53
-
54
-
55
-
56
# Build the initial prompt and the ConversationChain that the Gradio handler
# below reuses (it swaps in a fresh prompt per request).
# Fixed: `from langchain import PromptTemplate` is the deprecated root-package
# path; use langchain_core.prompts, consistent with the ChatPromptTemplate
# import at the top of the file.
from langchain_core.prompts import PromptTemplate

query_text = "when did alice see mad hatter"

# Fixed: placeholder-free f-string replaced with a plain string.
results = db.similarity_search_with_relevance_scores(query_text, k=3)
if len(results) == 0 or results[0][1] < 0.5:
    print("Unable to find matching results.")

# Join the retrieved chunks into one context blob separated by '---'.
context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])

template = """
The following is a conversation between a human an AI. Answer question based only on the conversation.

Current conversation:
{history}

"""

s = """

\n question: {input}

\n answer:""".strip()

# The retrieved context is concatenated directly into the template text, so
# only {history} and {input} remain as prompt variables.
prompt = PromptTemplate(
    input_variables=["history", "input"],
    template=template + context_text + "\n" + s,
)

chain = ConversationChain(
    llm=llm,
    prompt=prompt,
    memory=memory,
    verbose=True,
)
95
-
96
-
97
-
98
-
99
# Generate a response from the Llama model
def get_llama_response(message: str, history: list) -> str:
    """Generate a retrieval-grounded reply to *message* via the shared chain.

    Parameters:
        message (str): User's input message.
        history (list): Past conversation history. Unused here — the chain's
            own ConversationBufferWindowMemory supplies {history}; kept for
            interface compatibility with chat-style callers.

    Returns:
        str: Generated response from the Llama model.
    """
    query_text = message

    # Retrieve the two most relevant chunks from the Chroma index.
    # Fixed: placeholder-free f-string replaced with a plain string.  Note the
    # low-relevance case only warns; generation still proceeds on whatever
    # was retrieved (preserving the original behavior).
    results = db.similarity_search_with_relevance_scores(query_text, k=2)
    if len(results) == 0 or results[0][1] < 0.5:
        print("Unable to find matching results.")

    context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])

    template = """
The following is a conversation between a human an AI. Answer question based only on the conversation.

Current conversation:
{history}

"""

    s = """

\n question: {input}

\n answer:""".strip()

    # Rebuild the prompt with the fresh context and swap it into the shared
    # chain before predicting.
    # NOTE(review): mutating the module-level `chain` makes this handler
    # unsafe under concurrent requests — confirm Gradio serves it serially.
    prompt = PromptTemplate(
        input_variables=["history", "input"],
        template=template + context_text + "\n" + s,
    )
    chain.prompt = prompt
    res = chain.predict(input=query_text)
    return res
144
-
145
-
146
-
147
import gradio as gr

# Minimal Gradio UI: one textbox in, one textbox out, wired straight to the
# RAG handler; `share=True` requests a public share link.
iface = gr.Interface(
    fn=get_llama_response,
    inputs=gr.Textbox(),
    outputs="textbox",
)
iface.launch(share=True)
151
-
152
-
153
-