Aleksandr Maiorov committed
Commit a1708f0 (v0.5)
Parent(s): e00eb41

Return to the concept of a simple API for the AI
- app.py +2 -14
- requirements.txt +2 -4
app.py CHANGED
@@ -1,16 +1,11 @@
-import asyncio
 import logging
-import subprocess
-from typing import Union, Optional, SupportsIndex
+from typing import Union, Optional
 
-import nest_asyncio
 from fastapi import FastAPI
 from llama_cpp import Llama
 
-from bot import start_bot
 
 app = FastAPI()
-nest_asyncio.apply()
 
 CHAT_TEMPLATE = '<|system|> {system_prompt}<|end|><|user|> {prompt}<|end|><|assistant|>'.strip()
 SYSTEM_PROMPT = ''
@@ -107,21 +102,14 @@ def greet_json():
 
 @app.put("/system-prompt")
 async def set_system_prompt(text: str):
-    # Generate a response using the model
     logger.info('post/system-prompt')
     global SYSTEM_PROMPT
     SYSTEM_PROMPT = text
 
-
 @app.post("/predict")
 async def predict(text: str):
     # Generate a response using the model
     logger.info('post/predict')
     prompt = create_prompt(text)
    response = generate_response(prompt)
-    return {"response": response}
-
-# Start the Telegram bot when the application starts
-@app.on_event("startup")
-async def startup_event():
-    subprocess.Popen(["python", "bot.py"])  # run the bot in the background
+    return {"response": response}
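The endpoints call create_prompt and generate_response, which live in the part of app.py this diff does not touch (old lines 17-101). A minimal sketch of how they presumably tie CHAT_TEMPLATE, SYSTEM_PROMPT and the llama_cpp model together; the function bodies, model path and generation parameters below are assumptions, not code from the repository:

# Hypothetical reconstruction of the helpers referenced by /predict (not part of the diff).
from llama_cpp import Llama

llm = Llama(model_path="model.gguf")  # assumed path; the real model setup is outside the diff

def create_prompt(prompt: str) -> str:
    # Fill the Phi-style chat template with the current system prompt and the user text
    return CHAT_TEMPLATE.format(system_prompt=SYSTEM_PROMPT, prompt=prompt)

def generate_response(prompt: str) -> str:
    # Run a completion on the formatted prompt and return only the generated text
    output = llm(prompt, max_tokens=256, stop=["<|end|>"])
    return output["choices"][0]["text"]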
requirements.txt CHANGED
@@ -1,6 +1,4 @@
 fastapi
-uvicorn
+uvicorn
 huggingface-hub
-python-
-python-dotenv
-nest-asyncio
+python-dotenv
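With nest_asyncio and the Telegram bot startup removed, the Space is driven only by the FastAPI endpoints. A minimal sketch of calling them from a client, using just the standard library; the base URL (a local uvicorn default) is an assumption:

# Hypothetical client for the simplified API; host and port are assumptions.
import json
import urllib.parse
import urllib.request

BASE_URL = "http://localhost:8000"  # assumed local uvicorn default

def set_system_prompt(text: str) -> None:
    # PUT /system-prompt?text=... — FastAPI reads a bare `text: str` parameter from the query string
    url = f"{BASE_URL}/system-prompt?" + urllib.parse.urlencode({"text": text})
    urllib.request.urlopen(urllib.request.Request(url, method="PUT")).close()

def predict(text: str) -> str:
    # POST /predict?text=... and pull the "response" field out of the JSON body
    url = f"{BASE_URL}/predict?" + urllib.parse.urlencode({"text": text})
    with urllib.request.urlopen(urllib.request.Request(url, method="POST")) as resp:
        return json.loads(resp.read())["response"]

if __name__ == "__main__":
    set_system_prompt("You are a concise assistant.")
    print(predict("Hello!"))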