DonImages committed on
Commit
0cefee0
·
verified ·
1 Parent(s): cec3202

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -15
app.py CHANGED
@@ -1,34 +1,29 @@
1
  from fastapi import FastAPI, HTTPException
2
  import base64
3
  import os
4
- from contextlib import asynccontextmanager
5
 
6
- # Define the lifespan context manager
7
- @asynccontextmanager
8
- async def lifespan(app: FastAPI):
9
- lora_weights = None
10
- lora_path = "./lora_file.pth"
 
 
 
 
11
  if os.path.exists(lora_path):
12
  with open(lora_path, "rb") as f:
13
  lora_weights = base64.b64encode(f.read()).decode("utf-8")
14
  print("LoRA weights loaded and preprocessed successfully.")
15
  else:
16
  raise HTTPException(status_code=500, detail="LoRA file not found.")
17
-
18
- # Yield control back to the FastAPI app during runtime
19
- yield
20
-
21
- # Cleanup or shutdown logic here if necessary
22
- print("App is shutting down.")
23
-
24
- # Create the FastAPI app with the lifespan context manager
25
- app = FastAPI(lifespan=lifespan)
26
 
27
  @app.post("/modify-prompt")
28
  async def modify_prompt(prompt: str):
29
  global lora_weights
30
  if lora_weights is None:
31
  raise HTTPException(status_code=500, detail="LoRA weights not loaded.")
 
32
  # Combine prompt with preprocessed LoRA data
33
  extended_prompt = {
34
  "prompt": prompt,
 
1
  from fastapi import FastAPI, HTTPException
2
  import base64
3
  import os
 
4
 
5
# FastAPI application instance.
app = FastAPI()

# Module-level cache for the base64-encoded LoRA weights; populated once by
# the startup handler and read by the request handlers below.
lora_weights = None
9
+
10
@app.on_event("startup")  # NOTE(review): on_event is deprecated in newer FastAPI; migrate to a lifespan handler when upgrading.
async def load_lora_weights():
    """Load the LoRA weight file at startup and cache it in `lora_weights`.

    Reads ``./lora_file.pth`` as raw bytes, base64-encodes it, and stores the
    resulting UTF-8 string in the module-global ``lora_weights`` so request
    handlers can embed it without re-reading the file.

    Raises:
        RuntimeError: if the weight file is missing. (The original raised
            HTTPException, but no request is in flight during startup, so it
            could never become an HTTP response — fail startup loudly instead.)
    """
    global lora_weights
    lora_path = "./lora_file.pth"  # Ensure the correct file name
    if not os.path.exists(lora_path):
        raise RuntimeError(f"LoRA file not found at {lora_path}.")
    with open(lora_path, "rb") as f:
        lora_weights = base64.b64encode(f.read()).decode("utf-8")
    print("LoRA weights loaded and preprocessed successfully.")
 
 
 
 
 
 
 
 
 
20
 
21
  @app.post("/modify-prompt")
22
  async def modify_prompt(prompt: str):
23
  global lora_weights
24
  if lora_weights is None:
25
  raise HTTPException(status_code=500, detail="LoRA weights not loaded.")
26
+
27
  # Combine prompt with preprocessed LoRA data
28
  extended_prompt = {
29
  "prompt": prompt,