Spaces:
Sleeping
Sleeping
Aleksandr Maiorov
committed on
Commit
·
2bf7b09
1
Parent(s):
e00c1d6
v0.3.0.1
Browse files
app.py
CHANGED
@@ -1,10 +1,13 @@
|
|
1 |
import logging
|
|
|
2 |
from typing import Union, Optional, SupportsIndex
|
|
|
|
|
3 |
from fastapi import FastAPI
|
4 |
from llama_cpp import Llama
|
5 |
|
6 |
app = FastAPI()
|
7 |
-
|
8 |
CHAT_TEMPLATE = '<|system|> {system_prompt}<|end|><|user|> {prompt}<|end|><|assistant|>'.strip()
|
9 |
SYSTEM_PROMPT = '{prompt}'
|
10 |
|
@@ -93,8 +96,20 @@ def generate_response(prompt: str) -> Optional[str]:
|
|
93 |
except Exception as e:
|
94 |
logger.error(f"Ошибка обработки сообщения: {str(e)}")
|
95 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
96 |
|
97 |
|
|
|
|
|
|
|
|
|
98 |
@app.post("/webhook")
|
99 |
async def predict(response):
|
100 |
# Генерация ответа с помощью модели
|
|
|
1 |
import logging
|
2 |
+
import os
|
3 |
from typing import Union, Optional, SupportsIndex
|
4 |
+
|
5 |
+
import requests
|
6 |
from fastapi import FastAPI
|
7 |
from llama_cpp import Llama
|
8 |
|
9 |
app = FastAPI()
|
10 |
+
TELEGRAM_TOKEN = os.getenv('TELEGRAM_TOKEN')
|
11 |
CHAT_TEMPLATE = '<|system|> {system_prompt}<|end|><|user|> {prompt}<|end|><|assistant|>'.strip()
|
12 |
SYSTEM_PROMPT = '{prompt}'
|
13 |
|
|
|
96 |
except Exception as e:
|
97 |
logger.error(f"Ошибка обработки сообщения: {str(e)}")
|
98 |
|
99 |
+
def send_to_telegram(message, chat_id='719751843'):
    """Send a text message to a Telegram chat via the Bot API.

    Args:
        message: Text to deliver to the chat.
        chat_id: Target Telegram chat id. Defaults to the previously
            hard-coded chat so existing callers are unaffected.

    Returns:
        A human-readable Russian status string: success text when Telegram
        accepted the message, error text otherwise (including network errors).
    """
    url = f"https://api.telegram.org/bot{TELEGRAM_TOKEN}/sendMessage"
    payload = {
        "chat_id": chat_id,
        "text": message
    }
    try:
        # Without a timeout, a stalled Telegram endpoint would block this
        # call (and the serving worker) indefinitely.
        response = requests.post(url, json=payload, timeout=10)
    except requests.RequestException:
        # The function's contract is a status string, not an exception —
        # fold connection/timeout failures into the error result.
        return "Ошибка!"
    return "Сообщение отправлено!" if response.ok else "Ошибка!"
|
107 |
|
108 |
|
109 |
+
@app.get("/")
async def root():
    """Root endpoint: fires a fixed notification to Telegram and returns
    a plain hello payload. The send result is intentionally discarded.
    """
    # NOTE(review): this pings Telegram on EVERY GET / (health checks
    # included) — confirm that is intended.
    _status = send_to_telegram('aaaaaa hi hi hi')
    return {"message": "Hello World"}
|
113 |
@app.post("/webhook")
|
114 |
async def predict(response):
|
115 |
# Генерация ответа с помощью модели
|