Removed length limits from certain APIs to better handle programming problems
App.py CHANGED
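For context, a minimal standalone sketch of what the change amounts to on the OpenAI path: before this commit the solver call capped completions with max_tokens=100 and the prompt demanded responses under 100 characters; afterwards the cap and the length instruction are gone, so programming answers are not truncated. The sketch assumes the OpenAI Python SDK v1 client; the client setup, model name, and problem text are illustrative placeholders, not taken from App.py.

```python
# Sketch only: standalone version of the solver call after this commit.
# App.py itself uses self.clients["openai"] and self.model_path.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

problem = "Write a Python function that reverses a linked list."  # placeholder

response = client.chat.completions.create(
    model="gpt-4o",      # placeholder; App.py passes self.model_path
    # max_tokens=100,    # removed by this commit so long code answers are not cut off
    messages=[{
        "role": "user",
        "content": f"""
        PROBLEM: {problem}
        INSTRUCTIONS:
        - Provide a clear, concise solution.
        - Include detailed reasoning.
        - Do not repeat the solution or add extraneous text.
        """,
    }],
)

print(response.choices[0].message.content)
```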
@@ -73,16 +73,14 @@ class PolyThinkAgent:
         elif self.api_provider == "openai" and "openai" in self.clients:
             response = self.clients["openai"].chat.completions.create(
                 model=self.model_path,
-                max_tokens=100,
                 messages=[{
                     "role": "user",
                     "content": f"""
                     PROBLEM: {problem}
                     INSTRUCTIONS:
-                    - Provide a clear, concise solution
-                    - Include
+                    - Provide a clear, concise solution.
+                    - Include detailed reasoning.
                     - Do not repeat the solution or add extraneous text.
-                    - Keep the response under 100 characters.
                     """
                 }]
             )
@@ -93,10 +91,9 @@ class PolyThinkAgent:
             prompt = f"""
             PROBLEM: {problem}
             INSTRUCTIONS:
-            - Provide a clear, concise solution
-            - Include
+            - Provide a clear, concise solution.
+            - Include detailed reasoning.
             - Do not repeat the solution or add extraneous text.
-            - Keep the response under 100 characters.
             SOLUTION AND REASONING:
             """
             result = self.inference.text_generation(
@@ -112,10 +109,9 @@ class PolyThinkAgent:
                 f"""
                 PROBLEM: {problem}
                 INSTRUCTIONS:
-                - Provide a clear, concise solution
-                - Include
+                - Provide a clear, concise solution.
+                - Include detailed reasoning.
                 - Do not repeat the solution or add extraneous text.
-                - Keep the response under 100 characters.
                 """,
                 generation_config=genai.types.GenerationConfig(
                     temperature=0.5,
@@ -342,7 +338,7 @@ class ModelRegistry:
             {"name": "Claude 3 Haiku", "id": "claude-3-haiku-20240307", "provider": "anthropic", "type": ["solver"], "icon": "π"}
         ],
         "OpenAI": [
-            {"name": "GPT-4o", "id": "gpt-4o", "provider": "openai", "type": ["solver"], "icon": "
+            {"name": "GPT-4o", "id": "gpt-4o", "provider": "openai", "type": ["solver"], "icon": "🤖"},
             {"name": "GPT-4 Turbo", "id": "gpt-4-turbo", "provider": "openai", "type": ["solver"], "icon": "🤖"},
             {"name": "GPT-4", "id": "gpt-4", "provider": "openai", "type": ["solver"], "icon": "🤖"},
             {"name": "GPT-3.5 Turbo", "id": "gpt-3.5-turbo", "provider": "openai", "type": ["solver"], "icon": "🤖"},
@@ -1099,7 +1095,12 @@ def create_polythink_interface():
         with gr.Row():
             with gr.Column(scale=2):
                 gr.Markdown("### Problem Input")
-                problem_input = gr.Textbox(
+                problem_input = gr.Textbox(
+                    label="Problem",
+                    placeholder="Enter your problem or question here...",
+                    lines=10,
+                    max_lines=20
+                )
                 rounds_slider = gr.Slider(2, 6, value=2, step=1, label="Maximum Rounds")
                 solve_button = gr.Button("Solve Problem", elem_classes=["primary-button"])

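The last hunk replaces the single-line problem_input definition with a multi-line gr.Textbox so longer programming problems fit in the input box. Below is a standalone sketch of that input column; the echo-style solve handler, the output column, and demo.launch() are stand-ins for the real wiring in create_polythink_interface(), not part of this commit.

```python
# Minimal sketch of the rebuilt input column; the handler is a placeholder
# for the real solver pipeline in App.py.
import gradio as gr

def solve(problem: str, max_rounds: int) -> str:
    return f"Would run up to {int(max_rounds)} rounds on:\n{problem}"

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=2):
            gr.Markdown("### Problem Input")
            problem_input = gr.Textbox(
                label="Problem",
                placeholder="Enter your problem or question here...",
                lines=10,      # taller box for multi-line programming problems
                max_lines=20,  # box stops growing and scrolls past 20 lines
            )
            rounds_slider = gr.Slider(2, 6, value=2, step=1, label="Maximum Rounds")
            solve_button = gr.Button("Solve Problem", elem_classes=["primary-button"])
        with gr.Column(scale=3):
            output = gr.Markdown()
    solve_button.click(solve, inputs=[problem_input, rounds_slider], outputs=output)

demo.launch()
```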