import streamlit as st
from langchain_experimental.llms.ollama_functions import OllamaFunctions

# Chat model served by a local Ollama instance.
model = OllamaFunctions(model="gemma:7b")

# Bind one function schema and force the model to call it, so every response
# is a structured get_current_weather call rather than free-form text.
model = model.bind(
    functions=[
        {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, "
                        "e.g. San Francisco, CA",
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                    },
                },
                "required": ["location"],
            },
        }
    ],
    function_call={"name": "get_current_weather"},
)

user_input = st.text_input("Enter your question:")

# Streamlit reruns this whole script on every interaction, so an unguarded
# invoke() would hit the LLM with an empty prompt on first render. Guard on
# non-empty input, and render the response — the original discarded it, so
# the app displayed nothing.
if user_input:
    response = model.invoke(user_input)
    st.write(response)