# Hugging Face Spaces deployment: FastAPI app backed by a local Ollama model.
# (Header reconstructed from page-scrape residue "Spaces: / Running / Running".)
from fastapi import FastAPI
from langchain_community.llms import Ollama

# FastAPI application instance; route handlers below should be registered on it.
app = FastAPI()

# Initialize the Ollama model client.
# NOTE(review): assumes an Ollama server is reachable at its default address
# and that the "tinyllama" model has already been pulled — confirm in the
# Space's startup configuration.
llm = Ollama(model="tinyllama")
# NOTE(review): this reads like a FastAPI route handler whose decorator
# (e.g. `@app.get("/")`) was lost when the file was copied from the web UI —
# confirm and restore it, otherwise the endpoint is never registered.
async def root():
    """Health-check handler: report that the service is up.

    Returns:
        dict: a static status message.
    """
    return {"message": "Ollama is running on Hugging Face Spaces!"}
# NOTE(review): likely a FastAPI route handler (e.g. `@app.get("/chat")` or a
# POST route) whose decorator was lost in the copy — confirm and restore.
async def chat(query: str):
    """Run `query` through the module-level Ollama LLM and return its reply.

    Args:
        query: the user prompt forwarded verbatim to the model.

    Returns:
        dict: ``{"response": <model output>}``.
    """
    # llm.invoke is a synchronous call, so it blocks the event loop while the
    # model generates; consider run_in_executor if latency matters — TODO confirm.
    response = llm.invoke(query)
    return {"response": response}