timsmykov committed on
Commit
91e3f24
·
verified ·
1 Parent(s): ae7a494

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -41
app.py CHANGED
@@ -1,61 +1,70 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
- import datetime
3
- import requests
4
- import pytz
5
  import yaml
6
- from tools.final_answer import FinalAnswerTool
 
7
 
 
8
  from Gradio_UI import GradioUI
9
 
10
- # Below is an example of a tool that does nothing. Amaze us with your creativity !
11
  @tool
12
- def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
13
- #Keep this format for the description / args / args description but feel free to modify the tool
14
- """A tool that does nothing yet
15
- Args:
16
- arg1: the first argument
17
- arg2: the second argument
18
  """
19
- return "What magic will you build ?"
20
-
21
- @tool
22
- def get_current_time_in_timezone(timezone: str) -> str:
23
- """A tool that fetches the current local time in a specified timezone.
24
  Args:
25
- timezone: A string representing a valid timezone (e.g., 'America/New_York').
26
  """
27
- try:
28
- # Create timezone object
29
- tz = pytz.timezone(timezone)
30
- # Get current time in that timezone
31
- local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
32
- return f"The current local time in {timezone} is: {local_time}"
33
- except Exception as e:
34
- return f"Error fetching time for timezone '{timezone}': {str(e)}"
35
-
36
 
37
  final_answer = FinalAnswerTool()
38
 
39
- # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
40
- # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
41
-
42
- model = HfApiModel(
43
- max_tokens=2096,
44
- temperature=0.5,
45
- model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
46
- custom_role_conversions=None,
47
- )
48
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49
 
50
- # Import tool from Hub
51
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
52
 
53
  with open("prompts.yaml", 'r') as stream:
54
- prompt_templates = yaml.safe_load(stream)
55
-
 
 
 
 
 
 
 
 
 
56
  agent = CodeAgent(
57
  model=model,
58
- tools=[final_answer], ## add your tools here (don't remove final answer)
59
  max_steps=6,
60
  verbosity_level=1,
61
  grammar=None,
@@ -65,5 +74,4 @@ agent = CodeAgent(
65
  prompt_templates=prompt_templates
66
  )
67
 
68
-
69
  GradioUI(agent).launch()
 
1
+ from smolagents import CodeAgent, HfApiModel, load_tool
 
 
 
2
  import yaml
3
+ from smolagents import tool
4
+ from duckduckgo_search import DDGS
5
 
6
+ from tools.final_answer import FinalAnswerTool
7
  from Gradio_UI import GradioUI
8
 
 
9
  @tool
10
+ def DuckDuckGoSearchTool(query: str) -> str:
 
 
 
 
 
11
  """
12
+ Инструмент для поиска информации в интернете с помощью DuckDuckGo.
 
 
 
 
13
  Args:
14
+ query: Поисковый запрос.
15
  """
16
+ with DDGS() as ddgs:
17
+ results = [r for r in ddgs.text(query, max_results=5)] # Ограничиваем до 5 результатов
18
+ if not results:
19
+ return "По вашему запросу ничего не найдено."
20
+ formatted_results = "\n\n".join(
21
+ f"**Заголовок:** {r['title']}\n**Ссылка:** {r['href']}\n**Краткое содержание:** {r['body']}"
22
+ for r in results
23
+ )
24
+ return formatted_results
25
 
26
  final_answer = FinalAnswerTool()
27
 
28
+ # Define both model IDs
29
+ primary_model_id = 'Qwen/Qwen2.5-Coder-32B-Instruct'
30
+ fallback_model_id = 'https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 
 
 
 
 
 
31
 
32
+ try:
33
+ model = HfApiModel(
34
+ max_tokens=2096,
35
+ temperature=0.5,
36
+ model_id=primary_model_id,
37
+ custom_role_conversions=None,
38
+ )
39
+ print(f"Using primary model: {primary_model_id}")
40
+ except Exception as e:
41
+ print(f"Error initializing primary model: {e}")
42
+ print(f"Falling back to secondary model: {fallback_model_id}")
43
+ # If the primary model fails, use the fallback model
44
+ model = HfApiModel(
45
+ max_tokens=2096,
46
+ temperature=0.5,
47
+ model_id=fallback_model_id,
48
+ custom_role_conversions=None,
49
+ )
50
 
 
51
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
52
 
53
  with open("prompts.yaml", 'r') as stream:
54
+ try:
55
+ prompt_templates = yaml.safe_load(stream)
56
+ except yaml.YAMLError as exc:
57
+ print(exc)
58
+
59
+ if isinstance(prompt_templates, dict) and 'system_prompt' in prompt_templates:
60
+ # Если все ок, используем загруженный шаблон
61
+ system_prompt = prompt_templates['system_prompt']
62
+
63
+ prompt_templates = {'system_prompt': system_prompt}
64
+
65
  agent = CodeAgent(
66
  model=model,
67
+ tools=[DuckDuckGoSearchTool, final_answer, image_generation_tool],
68
  max_steps=6,
69
  verbosity_level=1,
70
  grammar=None,
 
74
  prompt_templates=prompt_templates
75
  )
76
 
 
77
  GradioUI(agent).launch()