File size: 1,144 Bytes
9905550
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import os

import requests
from telegram import Update  # fixed: was `telegram_gemini`, which does not exist
from telegram.ext import ApplicationBuilder, MessageHandler, ContextTypes, filters

# SECURITY NOTE(review): a live bot credential was committed in source.
# Prefer the BOT_TOKEN environment variable; the hard-coded value remains
# only as a backward-compatible fallback — rotate this token and remove it.
BOT_TOKEN = os.environ.get("BOT_TOKEN", "7490823724:AAEcskSIKg9t63nBME3Igkxw_QE4dl2Ql_U")
# Completion endpoint of the local llama.cpp server.
LLAMA_API_URL = "http://127.0.0.1:8080/completion"

# Fetch a completion from the llama.cpp server.
def get_llama_response(prompt: str) -> str:
    """Send *prompt* to the llama.cpp completion endpoint and return its text.

    Returns a Persian error message when the server is unreachable, times
    out, replies with a non-2xx status, or returns an unexpected payload.
    """
    payload = {
        "prompt": prompt,
        "max_tokens": 256,
        "temperature": 0.7,
        "stop": ["</s>", "User:"],
    }
    try:
        # A timeout keeps the bot from hanging forever if the server stalls;
        # generation can be slow, so allow a generous 60 seconds.
        response = requests.post(LLAMA_API_URL, json=payload, timeout=60)
    except requests.RequestException:
        # Connection refused / DNS failure / timeout — same user-facing error.
        return "خطا در ارتباط با مدل زبان."
    if response.ok:
        # .get() guards against a malformed success payload missing "content".
        return response.json().get("content", "").strip()
    return "خطا در ارتباط با مدل زبان."

# Telegram message handler: forward the user's text to the model and reply.
async def handle_message(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Reply to an incoming text message with the model's completion."""
    import asyncio  # local import: asyncio is not imported at module level

    message = update.message
    if message is None or message.text is None:
        # Defensive: the TEXT filter should guarantee a text message,
        # but edited/channel updates can arrive without one.
        return
    # get_llama_response blocks on HTTP I/O; run it in a worker thread so
    # the event loop keeps serving other updates while the model generates.
    reply = await asyncio.to_thread(get_llama_response, message.text)
    await message.reply_text(reply)

# Bot wiring: build the application, register the text handler, start polling.
# The __main__ guard keeps importing this module from starting the bot.
if __name__ == "__main__":
    app = ApplicationBuilder().token(BOT_TOKEN).build()
    # Plain text messages only; /commands are ignored.
    app.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, handle_message))
    app.run_polling()