File size: 2,723 Bytes
a6f9b2b
c19d193
a6f9b2b
 
 
6aae614
9b5b26a
 
 
a6f9b2b
9b5b26a
a6f9b2b
9b5b26a
a6f9b2b
9b5b26a
a6f9b2b
 
 
 
 
 
 
 
 
8c01ffb
a6f9b2b
ae7a494
adc610d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8c01ffb
9b5b26a
8c01ffb
31f7b5e
861422e
c8908d8
31f7b5e
c8908d8
 
a6f9b2b
31f7b5e
 
 
 
adc610d
31f7b5e
a6f9b2b
8c01ffb
8fe992b
a6f9b2b
8c01ffb
 
 
 
 
 
861422e
8fe992b
 
8c01ffb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
from smolagents import CodeAgent, HfApiModel, load_tool
import yaml
from smolagents import tool
from duckduckgo_search import DDGS

from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

@tool
def DuckDuckGoSearchTool(query: str) -> str:
    """
    Search the internet with DuckDuckGo and return formatted results.

    Args:
        query: The search query string.
    """
    with DDGS() as ddgs:
        # list() instead of a pass-through comprehension; max_results=5
        # keeps the tool output short enough for the agent's context.
        results = list(ddgs.text(query, max_results=5))
        if not results:
            # Russian: "Nothing was found for your query."
            return "По вашему запросу ничего не найдено."
        # Russian labels: Title / Link / Summary — user-facing, kept as-is.
        formatted_results = "\n\n".join(
            f"**Заголовок:** {r['title']}\n**Ссылка:** {r['href']}\n**Краткое содержание:** {r['body']}"
            for r in results
        )
    return formatted_results

final_answer = FinalAnswerTool()

# Primary model and a hosted-endpoint fallback.
primary_model_id = 'Qwen/Qwen2.5-Coder-32B-Instruct'
fallback_model_id = 'https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'


def _build_model(model_id):
    """Construct an HfApiModel with the shared generation settings."""
    # NOTE(review): max_tokens=2096 kept as-is — possibly a typo for 2048;
    # confirm before changing.
    return HfApiModel(
        max_tokens=2096,
        temperature=0.5,
        model_id=model_id,
        custom_role_conversions=None,
    )


# Prefer the primary model; fall back to the hosted endpoint on failure.
try:
    model = _build_model(primary_model_id)
    print(f"Using primary model: {primary_model_id}")
except Exception as e:
    print(f"Error initializing primary model: {e}")
    print(f"Falling back to secondary model: {fallback_model_id}")
    # If the primary model fails, use the fallback model
    model = _build_model(fallback_model_id)

image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load prompt_templates from prompts.yaml.
# Fix: pre-initialize so a YAMLError below no longer leaves the name
# undefined (which previously caused a NameError at the isinstance check).
prompt_templates = None
with open("prompts.yaml", 'r') as stream:
    try:
        prompt_templates = yaml.safe_load(stream)
    except yaml.YAMLError as exc:
        # Best-effort: report the parse error and fall through with None.
        print(exc)

# Verify prompt_templates has the expected shape before using it.
if isinstance(prompt_templates, dict) and 'system_prompt' in prompt_templates:
    # If everything is OK, use the loaded template.
    system_prompt = prompt_templates['system_prompt']

    # NOTE(review): this deliberately(?) drops every key except
    # 'system_prompt' — confirm CodeAgent needs no other template keys.
    prompt_templates = {'system_prompt': system_prompt}

# Assemble the agent from the model and the three tools defined above:
# web search, final answer, and image generation.
agent_tools = [DuckDuckGoSearchTool, final_answer, image_generation_tool]

agent = CodeAgent(
    model=model,
    tools=agent_tools,
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Serve the agent through the Gradio web interface.
GradioUI(agent).launch()