Update app.py

app.py CHANGED
@@ -32,7 +32,9 @@ from safetensors.torch import load_file
 from diffusers import FluxPipeline
 import torch
 from os import path  # add this line
-
+from datetime import datetime, timedelta
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.retry import Retry
 # cache path setup
 cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
 os.environ["TRANSFORMERS_CACHE"] = cache_path
@@ -587,30 +589,44 @@ def create_main_interface():
     async def execute_search_and_generate(query, setting):
         try:
             print(f"Executing web search for query: {query}")  # for debugging
+
             # run the search
             url = "https://api.serphouse.com/serp/live"
+
+            # set the date range
+            now = datetime.utcnow()
+            yesterday = now - timedelta(days=1)
+            date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
+
             payload = {
                 "data": {
                     "q": query,
-                    "domain": "google.com",
+                    "domain": "google.com",
+                    "loc": "United States",
                     "lang": "en",
                     "device": "desktop",
-                    "serp_type": "
-                    "loc": "Alba,Texas,United States",
-                    "loc_id": "1026201",
-                    "verbatim": "0",
-                    "gfilter": "0",
+                    "serp_type": "news",  # changed to a news search
                     "page": "1",
-                    "
+                    "num": "10",
+                    "date_range": date_range,
+                    "sort_by": "date"
                 }
             }
+
             headers = {
-
-
+                "accept": "application/json",
+                "content-type": "application/json",
+                "authorization": "Bearer V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
             }
 
+            # set up a session with retry logic
+            session = requests.Session()
+            retries = Retry(total=3, backoff_factor=0.5)
+            session.mount('https://', HTTPAdapter(max_retries=retries))
+
             print("Sending search request...")  # for debugging
-            response =
+            response = session.post(url, json=payload, headers=headers, timeout=(5, 15))
+            response.raise_for_status()
             results = response.json()
             print(f"Search results received: {len(results.get('results', []))} items")  # for debugging
 
@@ -625,22 +641,24 @@ def create_main_interface():
                 title = result.get('title', 'No Title')
                 url = result.get('url', '#')
                 snippet = result.get('snippet', 'No description available')
+                time = result.get('time', '')
 
                 search_content += f"""
                 <div class="search-item">
                     <h3><a href="{url}" target="_blank">{title}</a></h3>
                     <p>{snippet}</p>
+                    <span class="search-time">{time}</span>
                 </div>
                 """
             search_content += "</div>"
 
             # build a prompt that includes the search results
-            enhanced_prompt = f"""Based on these
+            enhanced_prompt = f"""Based on these recent news results, create a visually appealing and informative response:
             {search_content}
 
             Original query: {query}
 
-            Create a comprehensive visual response that incorporates relevant information from the
+            Create a comprehensive visual response that incorporates relevant information from the news results.
             """
 
             # fix to handle the async generator
@@ -656,7 +674,7 @@ def create_main_interface():
                 gr.update(active_key="error"),
                 gr.update(open=False)
             ]
-
+
     def execute_code(query: str):
         if not query or query.strip() == '':
             return None, gr.update(active_key="empty")
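For reference, here is a standalone sketch of the request flow this commit introduces: a requests.Session with a retry-enabled HTTPAdapter mounted on https://, a one-day date_range string, and the SERPHouse news payload. The endpoint URL, payload fields, headers, and timeout values are taken from the diff above; the names SERPHOUSE_URL, SERPHOUSE_TOKEN, build_session, and search_recent_news are illustrative, the bearer token is a placeholder rather than the literal value committed in app.py, and Retry is imported from urllib3 directly instead of through requests.packages.

import requests
from datetime import datetime, timedelta
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry  # same class the diff pulls in via requests.packages

SERPHOUSE_URL = "https://api.serphouse.com/serp/live"
SERPHOUSE_TOKEN = "YOUR_SERPHOUSE_API_TOKEN"  # placeholder; load from an env var or Space secret

def build_session() -> requests.Session:
    # Mirror the commit: up to 3 retries with exponential backoff on https:// requests.
    session = requests.Session()
    retries = Retry(total=3, backoff_factor=0.5)
    session.mount("https://", HTTPAdapter(max_retries=retries))
    return session

def search_recent_news(query: str) -> dict:
    # Restrict the news search to the last 24 hours, newest first.
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": "United States",
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": "1",
            "num": "10",
            "date_range": date_range,
            "sort_by": "date",
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {SERPHOUSE_TOKEN}",
    }

    session = build_session()
    # (connect, read) timeouts match the commit's timeout=(5, 15).
    response = session.post(SERPHOUSE_URL, json=payload, headers=headers, timeout=(5, 15))
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    results = search_recent_news("diffusion models")
    print(f"{len(results.get('results', []))} items")  # same response shape the app's debug print assumes

Keeping the token in an environment variable or a Space secret, as the placeholder suggests, also avoids committing the credential to the repository the way the current diff does.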