# NOTE: This file was recovered from a Hugging Face Spaces page scrape.
# The original page chrome (Space status, file size ~2,118 bytes, commit
# hashes e84b6e6/70e3e32/8d52d3e/8dc8132/c68ae28/9488825, and the line-number
# gutter) has been folded into this comment so the file parses as Python.
import gradio as gr
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
from langchain.agents import AgentExecutor, Tool, ZeroShotAgent, load_tools
from langchain.chains import LLMChain
from langchain_community.utilities import GoogleSearchAPIWrapper
import os
# API credentials come from the environment; nothing is validated here, so a
# missing GOOGLE_API_KEY only surfaces later as an auth error from the model.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
# NOTE(review): SERPAPI_API_KEY is read but never used in this file — it
# presumably belongs to the commented-out search tool below; confirm intent.
SERPAPI_API_KEY = os.getenv("SERPAPI_API_KEY")
# Gemini Pro chat model shared by the agent's reasoning and the math tool.
# temperature=0.7 gives moderately varied (non-deterministic) answers.
llm = ChatGoogleGenerativeAI(
google_api_key=GOOGLE_API_KEY,
model="gemini-pro",
temperature=0.7
)
# Built-in LangChain tools the agent may invoke: Wikipedia lookup and an
# LLM-backed calculator ("llm-math" needs the llm instance to work).
tools = load_tools(['wikipedia','llm-math'], llm=llm) #,'wikipedia',serpapi
# Commented-out alternative tool set: a single Google Search tool via
# GoogleSearchAPIWrapper (would require Google CSE credentials).
# search = GoogleSearchAPIWrapper()
# tools = [
# Tool(
# name="Search",
# func=search.run,
# description="useful for when you need to answer questions about current events",
# )
# ]
# Prompt scaffolding for the ZeroShotAgent: prefix introduces the tools,
# suffix injects conversation history, the user question, and the agent's
# intermediate reasoning (scratchpad) on every step.
prefix = """Have a conversation with a human, answering the following questions as best you can. You have access to the following tools:"""
suffix = """Begin!"
{chat_history}
Question: {input}
{agent_scratchpad}"""
# create_prompt renders tool names/descriptions into the template; the
# input_variables must match the placeholders used in suffix above.
prompt = ZeroShotAgent.create_prompt(
tools,
prefix=prefix,
suffix=suffix,
input_variables=["input", "chat_history", "agent_scratchpad"],
)
# Buffer memory keyed to the {chat_history} placeholder; return_messages=True
# stores structured message objects rather than a flat string.
memory = ConversationBufferMemory(memory_key="chat_history",return_messages=True)
# Wire the pieces together: LLM + prompt -> chain -> agent -> executor.
# verbose=True prints the agent's thought/action trace to stdout.
llm_chain = LLMChain(llm=llm, prompt=prompt)
agent = ZeroShotAgent(llm_chain=llm_chain, tools=tools, verbose=True)
agent_chain = AgentExecutor.from_agent_and_tools(
agent=agent, tools=tools, verbose=True, memory=memory
)
# conversation = ConversationChain(
# llm=llm,
# verbose=True,
# memory=ConversationBufferMemory()
# )
def chat(prompt):
    """Run one conversational turn through the agent.

    ``prompt`` is the raw user message from the Gradio textbox. Returns a
    pair: the agent's answer, and the current memory contents so the UI can
    display the running conversation history.
    """
    answer = agent_chain.run(input=prompt)
    history = memory.load_memory_variables({})
    return answer, history
# Gradio UI: one multi-line text input; two outputs (the agent's reply and
# the full conversation history returned by chat()).
iface = gr.Interface(
    fn=chat,
    inputs=[gr.Textbox(lines=2, placeholder="Type your message here")],
    outputs=[gr.Textbox(label="Response"), gr.Textbox(label="Conversation History", lines=10)],  # Adjusted for multiple lines
    title="Chat with Gemini-Pro",
    #live=True, # Enable live updates
)
# Fix: removed the stray trailing "|" (scrape artifact) that made this line a
# SyntaxError. debug=True surfaces server-side tracebacks in the console.
iface.launch(debug=True)