Update app.py
app.py (changed)
@@ -7,8 +7,7 @@ app = FastAPI()
 
 # Define the request model
 class ChatRequest(BaseModel):
-
-    user_query: str
+    messages: list = []
     model: str = "gemini-1.5-pro-latest"
     temperature: float = 1.0
     top_p: float = 0.8
@@ -26,16 +25,7 @@ headers = {
 async def chat(request: ChatRequest):
     # Define the payload
     payload = {
-        "messages": [
-            {
-                "role": "system",
-                "content": request.system_prompt
-            },
-            {
-                "role": "user",
-                "content": request.user_query
-            }
-        ],
+        "messages": request.messages,
         "stream": True,
         "model": request.model,
         "temperature": request.temperature,
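For context, a minimal sketch of how the touched pieces fit together after this change. Only the request model and the top of the payload appear in the diff; the imports, the route decorator and path, the top_p entry, and everything below "temperature" are assumptions filled in for illustration. Note that the removed payload referenced request.system_prompt, which the visible model never declared, so having callers supply the full messages list also drops that reference.

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

# Define the request model
class ChatRequest(BaseModel):
    # Callers now send the whole chat history instead of a single user_query
    messages: list = []
    model: str = "gemini-1.5-pro-latest"
    temperature: float = 1.0
    top_p: float = 0.8

@app.post("/chat")  # route path assumed; the decorator is not visible in the diff
async def chat(request: ChatRequest):
    # Define the payload
    payload = {
        "messages": request.messages,  # passed through exactly as received
        "stream": True,
        "model": request.model,
        "temperature": request.temperature,
        "top_p": request.top_p,  # assumed; the diff cuts off after temperature
    }
    # ... forwarding the payload (with the headers defined above) is unchanged
    # by this commit and is omitted here.

With the new shape, a client builds the message list itself, for example (URL and port are assumptions):

import requests

resp = requests.post(
    "http://localhost:8000/chat",
    json={
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Hello!"},
        ],
        "model": "gemini-1.5-pro-latest",
    },
)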