File size: 2,542 Bytes
91e3f24
c19d193
91e3f24
 
8fe992b
91e3f24
9b5b26a
 
 
91e3f24
9b5b26a
91e3f24
9b5b26a
91e3f24
9b5b26a
91e3f24
 
 
 
 
 
 
 
 
8c01ffb
6aae614
ae7a494
91e3f24
 
 
8c01ffb
91e3f24
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8c01ffb
9b5b26a
8c01ffb
861422e
91e3f24
 
 
 
 
 
 
 
 
 
 
8c01ffb
8fe992b
91e3f24
8c01ffb
 
 
 
 
 
861422e
8fe992b
 
8c01ffb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
from smolagents import CodeAgent, HfApiModel, load_tool
import yaml
from smolagents import tool
from duckduckgo_search import DDGS

from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

@tool
def DuckDuckGoSearchTool(query: str) -> str:
    """
    Tool for searching the internet for information using DuckDuckGo.
    Args:
        query: The search query.
    """
    # Materialize the (lazy) result iterator while the session is open,
    # then close it before formatting.
    with DDGS() as ddgs:
        hits = list(ddgs.text(query, max_results=5))  # cap at 5 results
    if not hits:
        return "По вашему запросу ничего не найдено."
    return "\n\n".join(
        f"**Заголовок:** {hit['title']}\n**Ссылка:** {hit['href']}\n**Краткое содержание:** {hit['body']}"
        for hit in hits
    )

final_answer = FinalAnswerTool()

# Define both model IDs: a serverless model first, a dedicated HF inference
# endpoint as the backup.
primary_model_id = 'Qwen/Qwen2.5-Coder-32B-Instruct'
fallback_model_id = 'https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'


def _build_model(model_id: str) -> HfApiModel:
    """Construct an HfApiModel with the shared generation settings."""
    return HfApiModel(
        max_tokens=2096,
        temperature=0.5,
        model_id=model_id,
        custom_role_conversions=None,
    )


# NOTE(review): this assumes the HfApiModel constructor raises when the model
# is unavailable — if it only fails at request time, the fallback never
# triggers; confirm against the smolagents API.
try:
    model = _build_model(primary_model_id)
    print(f"Using primary model: {primary_model_id}")
except Exception as e:
    print(f"Error initializing primary model: {e}")
    print(f"Falling back to secondary model: {fallback_model_id}")
    # If the primary model fails, use the fallback model
    model = _build_model(fallback_model_id)

# Remote text-to-image tool from the agents course (runs remote code).
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Initialize before the try block: if the YAML fails to parse, the original
# code left `prompt_templates` unbound, crashing with NameError below.
prompt_templates = None
with open("prompts.yaml", 'r') as stream:
    try:
        prompt_templates = yaml.safe_load(stream)
    except yaml.YAMLError as exc:
        print(exc)

if isinstance(prompt_templates, dict) and 'system_prompt' in prompt_templates:
    # Keep only the system prompt so stale keys from the file don't leak
    # into the agent configuration.
    prompt_templates = {'system_prompt': prompt_templates['system_prompt']}

# Agent configuration, gathered in one place before construction.
agent_config = dict(
    model=model,
    tools=[DuckDuckGoSearchTool, final_answer, image_generation_tool],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)
agent = CodeAgent(**agent_config)

# Serve the agent through the Gradio web UI.
GradioUI(agent).launch()