import requests
from telegram import Update
from telegram.ext import ApplicationBuilder, MessageHandler, ContextTypes, filters
BOT_TOKEN = ""
LLAMA_API_URL = "http://127.0.0.1:8080/completion"
# Get a completion from the llama.cpp server
def get_llama_response(prompt):
    full_prompt = f"User: {prompt}\nAssistant:"
    payload = {
        "prompt": full_prompt,
        "n_predict": 64,  # llama.cpp's /completion endpoint expects n_predict rather than max_tokens
        "temperature": 0.7,
        "stop": ["</s>", "User:"]
    }
    try:
        response = requests.post(LLAMA_API_URL, json=payload, timeout=120)
    except requests.RequestException:
        return "Error communicating with the language model."
    if response.ok:
        return response.json()["content"].strip()
    return "Error communicating with the language model."

# Handle messages that start with /
async def handle_command(update: Update, context: ContextTypes.DEFAULT_TYPE):
    message = update.message
    # Strip the /gemma command prefix (lstrip('/gemma') would strip individual
    # characters from the left, not the prefix as a whole)
    user_input = message.text.removeprefix('/gemma').strip()
    reply = get_llama_response(user_input)
    await message.reply_text(reply)

# Set up and start the bot
app = ApplicationBuilder().token(BOT_TOKEN).build()
app.add_handler(MessageHandler(filters.COMMAND, handle_command))
app.run_polling()