from fastapi import FastAPI
from langchain_community.llms import Ollama

app = FastAPI()

# Initialize the Ollama model
llm = Ollama(model="tinyllama")


@app.get("/")
async def root():
    # Health-check endpoint confirming the Space is up
    return {"message": "Ollama is running on Hugging Face Spaces!"}


@app.get("/chat")
async def chat(query: str):
    # Forward the query string to the local Ollama model and return its reply
    response = llm.invoke(query)
    return {"response": response}
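
# A minimal sketch of how this app might be launched and queried locally.
# Assumptions: uvicorn is installed, the file is saved as app.py, and port
# 7860 (the Hugging Face Spaces default) is free; the port, filename, and
# sample query below are illustrative, not prescribed by the original.
if __name__ == "__main__":
    import uvicorn

    # Run with `python app.py` for local testing; on Spaces the platform
    # usually starts uvicorn itself, so this block is a local convenience.
    uvicorn.run(app, host="0.0.0.0", port=7860)

# Example request once the server is running (hypothetical local URL):
#   curl "http://localhost:7860/chat?query=Hello"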