Echo-ai committed
Commit 5946574 · verified · 1 Parent(s): 1301813

Update app.py

Files changed (1)
  1. app.py +21 -10
app.py CHANGED
@@ -1,14 +1,16 @@
 from fastapi import FastAPI, HTTPException
+from fastapi.responses import StreamingResponse
 import requests
+import json
 
 app = FastAPI()
 
-# Ollama internal URL (running locally in the container)
+# Ollama internal URL
 OLLAMA_BASE_URL = "http://localhost:11434"
 
-# Proxy endpoint to Ollama's API
+# Generic proxy for other API endpoints
 @app.get("/api/{path:path}")
-async def ollama_proxy(path: str, query: str = None):
+async def ollama_proxy_get(path: str, query: str = None):
     url = f"{OLLAMA_BASE_URL}/api/{path}"
     params = {"query": query} if query else {}
     try:
@@ -18,16 +20,25 @@ async def ollama_proxy(path: str, query: str = None):
     except requests.exceptions.RequestException as e:
         raise HTTPException(status_code=500, detail=str(e))
 
-@app.post("/api/{path:path}")
-async def ollama_proxy_post(path: str, body: dict):
-    url = f"{OLLAMA_BASE_URL}/api/{path}"
+# Handle /api/chat specifically
+@app.post("/api/chat")
+async def ollama_chat(body: dict):
+    url = f"{OLLAMA_BASE_URL}/api/chat"
     try:
-        response = requests.post(url, json=body)
+        # Forward the request to Ollama with streaming support
+        response = requests.post(url, json=body, stream=True)
         response.raise_for_status()
-        return response.json()
+
+        # Stream the response back to the client
+        def generate():
+            for chunk in response.iter_lines():
+                if chunk:
+                    yield chunk + b"\n"
+
+        return StreamingResponse(generate(), media_type="text/event-stream")
     except requests.exceptions.RequestException as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        raise HTTPException(status_code=500, detail=f"Ollama error: {str(e)}")
 
 @app.get("/")
 async def root():
-    return {"message": "Ollama running on Hugging Face Spaces! use the space url"}
+    return {"message": "Ollama API proxy running on Hugging Face Spaces!"}