from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
import requests
import json
from typing import AsyncIterator
import schedule
import time
import threading
import os

app = FastAPI()

# Define the request model
class ChatRequest(BaseModel):
    messages: list = [{"role": "user", "content": "Lol full form"}]
    model: str = "gemini-1.5-pro-latest"
    temperature: float = 1.0
    top_p: float = 0.8
    max_tokens: int = 4000

# Define the upstream URL and headers
url = "https://chat.typegpt.net/api/openai/v1/chat/completions"
headers = {
    "Accept": "application/json, text/event-stream",
    "Content-Type": "application/json",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36 Edg/130.0.0.0",
}

@app.post("/chat")
async def chat(request: ChatRequest):
    # Build the upstream payload; streaming is always enabled
    payload = {
        "messages": request.messages,
        "stream": True,
        "model": request.model,
        "temperature": request.temperature,
        "top_p": request.top_p,
        "max_tokens": request.max_tokens,
    }

    # Make the POST request with streaming
    try:
        response = requests.post(url, headers=headers, data=json.dumps(payload), stream=True)

        # Check if the request was successful
        if response.status_code == 200:
            async def event_stream() -> AsyncIterator[str]:
                # Stream the upstream SSE response line by line.
                # Note: requests iterates synchronously, so this blocks the
                # event loop while waiting for data from the upstream server.
                for line in response.iter_lines():
                    if not line:
                        continue
                    decoded_line = line.decode("utf-8")
                    # Upstream sends Server-Sent Events; data lines start with "data: "
                    if decoded_line.startswith("data: "):
                        try:
                            data = json.loads(decoded_line[len("data: "):])
                            content = data.get("choices", [{}])[0].get("delta", {}).get("content", "")
                            if content:
                                yield f"{json.dumps({'response': content})}\n\n"
                        except json.JSONDecodeError:
                            # Skip non-JSON lines such as the final "data: [DONE]" marker
                            continue

            return StreamingResponse(event_stream(), media_type="text/event-stream")
        else:
            raise HTTPException(status_code=response.status_code, detail=response.text)
    except HTTPException:
        # Re-raise HTTP errors as-is instead of converting them to 500s
        raise
    except Exception as e:
        print(e)
        raise HTTPException(status_code=500, detail=str(e))


def run_schedule():
    while True:
        schedule.run_pending()
        time.sleep(1)


def scheduled_function():
    # Execute the code stored in the "execute" environment variable, if set
    code = os.environ.get("execute")
    if code:
        exec(code)


# Schedule the function to run every minute
schedule.every(1).minutes.do(scheduled_function)

# Run the scheduler in a daemon thread so it does not block the main thread
# or keep the process alive on shutdown
thread = threading.Thread(target=run_schedule, daemon=True)
thread.start()

if __name__ == "__main__":
    scheduled_function()
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8083)
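
# --- Usage sketch (not part of the original script; assumptions noted below) ---
# A minimal client for the /chat endpoint above, assuming the server is running
# locally on port 8083 as configured in the __main__ block. It posts a single
# user message and prints each streamed chunk, which the endpoint emits as
# JSON lines of the form {"response": "..."}.
#
# import json
# import requests
#
# def stream_chat(prompt: str, base_url: str = "http://localhost:8083") -> None:
#     payload = {"messages": [{"role": "user", "content": prompt}]}
#     with requests.post(f"{base_url}/chat", json=payload, stream=True) as resp:
#         resp.raise_for_status()
#         for line in resp.iter_lines():
#             if line:
#                 chunk = json.loads(line.decode("utf-8"))
#                 print(chunk["response"], end="", flush=True)
#
# stream_chat("Lol full form")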