# of_LLms / main.py
# Author: ka1kuk (commit b216cb2, verified) — 1.39 kB
# Gradio app that binds an Ollama model to a weather function-call schema.
import gradio as gr
from langchain_experimental.llms.ollama_functions import OllamaFunctions
# Schema for the single tool the model is allowed to call.
weather_function_schema = {
    "name": "get_current_weather",
    "description": "Get the current weather in a given location",
    "parameters": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g., San Francisco, CA",
            },
            "unit": {
                "type": "string",
                "enum": ["celsius", "fahrenheit"],
            },
        },
        "required": ["location"],
    },
}

# Initialize the Ollama model and bind the schema in one chained call;
# function_call forces the model to always invoke get_current_weather.
model = OllamaFunctions(model="gemma:7b").bind(
    functions=[weather_function_schema],
    function_call={"name": "get_current_weather"},
)
# Gradio callback: forwards the raw user text to the bound model.
def get_weather(user_input):
    """Invoke the function-bound model on *user_input* and return its result."""
    return model.invoke(user_input)
# Create the Gradio interface.
# FIX: the original used gr.inputs.Textbox, a Gradio 2.x namespace that was
# deprecated in 3.x and removed in 4.x — components are now instantiated
# directly as gr.Textbox.
iface = gr.Interface(
    fn=get_weather,
    inputs=gr.Textbox(
        lines=2,
        placeholder="Enter Location and Unit (e.g., 'San Francisco, CA', 'celsius')",
    ),
    outputs="text",
    title="Weather Information",
    description="Enter a location to get the current weather.",
)

# Launch the application (blocks and serves the web UI).
iface.launch()