from fastapi import FastAPI, HTTPException
import base64
import os
from contextlib import asynccontextmanager

# Module-level storage for the preprocessed LoRA weights, shared with the
# request handler below.
lora_weights = None

# Define the lifespan context manager
@asynccontextmanager
async def lifespan(app: FastAPI):
    global lora_weights
    lora_path = "./lora_file.pth"
    if os.path.exists(lora_path):
        with open(lora_path, "rb") as f:
            # Read the LoRA file and base64-encode it so it can be embedded in JSON
            lora_weights = base64.b64encode(f.read()).decode("utf-8")
        print("LoRA weights loaded and preprocessed successfully.")
    else:
        # HTTPException only applies inside request handlers; during startup a
        # plain RuntimeError aborts the application instead.
        raise RuntimeError("LoRA file not found.")
    # Yield control back to the FastAPI app during runtime
    yield
    # Cleanup or shutdown logic here if necessary
    print("App is shutting down.")

# Create the FastAPI app with the lifespan context manager
app = FastAPI(lifespan=lifespan)

@app.post("/modify-prompt")
async def modify_prompt(prompt: str):
    # lora_weights is only read here, so no global declaration is needed
    if lora_weights is None:
        raise HTTPException(status_code=500, detail="LoRA weights not loaded.")
    # Combine the prompt with the preprocessed LoRA data
    extended_prompt = {
        "prompt": prompt,
        "lora": lora_weights,
    }
    return extended_prompt
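
# Example usage (a minimal sketch; assumes uvicorn is installed and this file is
# saved as main.py -- adjust the module name to match your project):
#
#   uvicorn main:app --reload
#
# Then call the endpoint, for example:
#
#   curl -X POST "http://127.0.0.1:8000/modify-prompt?prompt=hello"
#
# Note: a bare `prompt: str` parameter (no Pydantic model) is treated by FastAPI
# as a query parameter, which is why it is passed in the query string above.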