Update app-backup.py
app-backup.py (+178 −114)
CHANGED
@@ -15,10 +15,12 @@ target_models = {
     "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct",
     "ginipick/flux-lora-eric-cat": "https://huggingface.co/ginipick/flux-lora-eric-cat",
     "seawolf2357/flux-lora-car-rolls-royce": "https://huggingface.co/seawolf2357/flux-lora-car-rolls-royce",
-
     "moreh/Llama-3-Motif-102B-Instruct": "https://huggingface.co/moreh/Llama-3-Motif-102B-Instruct",
-
-

     "Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B",
     "AALF/gemma-2-27b-it-SimPO-37K": "https://huggingface.co/AALF/gemma-2-27b-it-SimPO-37K",
@@ -292,84 +294,166 @@ target_models = {
-def
-        # Fetch the full model list (sorted by likes)
-        global_params = {
-            'full': 'true',
-            'limit': 10000,  # extended up to rank 10000
-            'sort': 'likes',
-            'direction': -1
-        }
-        global_response = requests.get(
-            params=
-        if
-                # Per-model API
-                    # Combine the global rank info with the per-model info
-                    rank_info = global_ranks.get(normalized_id, {})
-                        'global_rank': rank_info
-                    print(f"Model {model_id}: Rank={model_info['global_rank']},
-                    print(f"Failed to fetch data for {model_id}: {response.status_code}")
-                        'global_rank': 'Not in top
-                print(f"Error
-                    'global_rank': 'Not in top
@@ -378,38 +462,28 @@ def get_models_data(progress=gr.Progress()):
-        if not filtered_models:
-            return create_error_plot(), "<div>No model data could be found.</div>", pd.DataFrame()
-        if valid_indices:  # build the chart only when some models fall inside the ranking
-            valid_ids = [ids[i] for i in valid_indices]
-            valid_ranks = [ranks[i] for i in valid_indices]
-            valid_likes = [likes[i] for i in valid_indices]
-            valid_downloads = [downloads[i] for i in valid_indices]
-            # Invert the Y-axis values
-            y_values = [
-            # Create the bar chart
-                x=
-                text=[f"Global Rank: {r}<br>
-                    for r,
@@ -417,7 +491,7 @@ def get_models_data(progress=gr.Progress()):
-                'text': 'Hugging Face Models Global Rankings
@@ -426,9 +500,9 @@ def get_models_data(progress=gr.Progress()):
-                ticktext=[
-                tickvals=[
-                range=[0,
@@ -441,18 +515,12 @@ def get_models_data(progress=gr.Progress()):
-            <h2 style='color: #2c3e50;'>Models Global Rankings
-            global_rank = model['global_rank']
-            likes = model['likes']
-            downloads = model['downloads']
-            title = model.get('title', 'No Title')
-            rank_display = f"Global Rank #{global_rank}" if isinstance(global_rank, (int, float)) else global_rank
@@ -462,11 +530,11 @@ def get_models_data(progress=gr.Progress()):
-                <h3 style='color: #34495e;'>{rank_display}
-                <
-                <p style='color: #7f8c8d;'
-                <p style='color: #7f8c8d;'
-                <a href='{target_models[
@@ -485,16 +553,14 @@ def get_models_data(progress=gr.Progress()):
-            'Global Rank':
-            'Model ID':
-            'Title':
-            'URL': target_models[
-        } for
-        df = pd.DataFrame(df_data)
@@ -502,6 +568,7 @@ def get_models_data(progress=gr.Progress()):
@@ -554,7 +621,7 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
-        'limit':
@@ -588,13 +655,11 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
-    y_values = [301 - r for r in ranks]
-    # Create the bar chart
-        y=
@@ -604,7 +669,7 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
-            'text': f'Hugging Face Spaces {sort_type.title()} Rankings (Top
@@ -613,9 +678,11 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
@@ -638,7 +705,6 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
-    # Adjusted the HTML card generation part of the spaces function
@@ -675,8 +741,6 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
@@ -766,7 +830,7 @@ def get_trending_spaces_without_token():
-        'limit':
@@ -908,7 +972,7 @@ def refresh_data():
-
"LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct",
|
16 |
"ginipick/flux-lora-eric-cat": "https://huggingface.co/ginipick/flux-lora-eric-cat",
|
17 |
"seawolf2357/flux-lora-car-rolls-royce": "https://huggingface.co/seawolf2357/flux-lora-car-rolls-royce",
|
|
|
18 |
"moreh/Llama-3-Motif-102B-Instruct": "https://huggingface.co/moreh/Llama-3-Motif-102B-Instruct",
|
19 |
+
|
20 |
+
|
21 |
+
"NCSOFT/VARCO-VISION-14B": "https://huggingface.co/NCSOFT/VARCO-VISION-14B",
|
22 |
+
"NCSOFT/Llama-VARCO-8B-Instruct": "https://huggingface.co/NCSOFT/Llama-VARCO-8B-Instruct",
|
23 |
+
"NCSOFT/VARCO-VISION-14B-HF": "https://huggingface.co/NCSOFT/VARCO-VISION-14B-HF",
|
24 |
|
25 |
"Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B",
|
26 |
"AALF/gemma-2-27b-it-SimPO-37K": "https://huggingface.co/AALF/gemma-2-27b-it-SimPO-37K",
|
|
|
294 |
"sel303/llama3-diverce-ver1.6": "https://huggingface.co/sel303/llama3-diverce-ver1.6"
|
295 |
}
|
296 |
|
297 |
+
def get_korea_models():
|
298 |
+
"""Korea ๊ด๋ จ ๋ชจ๋ธ ๊ฒ์"""
|
299 |
+
params = {
|
300 |
+
"search": "korea",
|
301 |
+
"full": "True",
|
302 |
+
"config": "True",
|
303 |
+
"limit": 1000
|
304 |
+
}
|
305 |
+
|
306 |
try:
|
307 |
+
response = requests.get(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
308 |
"https://huggingface.co/api/models",
|
309 |
headers={'Accept': 'application/json'},
|
310 |
+
params=params
|
311 |
)
|
312 |
|
313 |
+
if response.status_code == 200:
|
314 |
+
return response.json()
|
315 |
+
else:
|
316 |
+
print(f"Failed to fetch Korea models: {response.status_code}")
|
317 |
+
return []
|
318 |
+
except Exception as e:
|
319 |
+
print(f"Error fetching Korea models: {str(e)}")
|
320 |
+
return []
|
321 |
+
|
322 |
+
def get_all_models(limit=1000):
|
323 |
+
"""๋ชจ๋ ๋ชจ๋ธ๊ณผ Korea ๊ด๋ จ ๋ชจ๋ธ ๊ฐ์ ธ์ค๊ธฐ"""
|
324 |
+
all_models = []
|
325 |
+
|
326 |
+
# 1. ์ผ๋ฐ ๋ชจ๋ธ ๋ฆฌ์คํธ ๊ฐ์ ธ์ค๊ธฐ
|
327 |
+
params = {
|
328 |
+
"limit": limit,
|
329 |
+
"full": "True",
|
330 |
+
"config": "True"
|
331 |
+
}
|
332 |
+
|
333 |
+
response = requests.get(
|
334 |
+
"https://huggingface.co/api/models",
|
335 |
+
headers={'Accept': 'application/json'},
|
336 |
+
params=params
|
337 |
+
)
|
338 |
+
|
339 |
+
if response.status_code == 200:
|
340 |
+
all_models.extend(response.json())
|
341 |
+
print(f"Fetched {len(all_models)} general models")
|
342 |
+
|
343 |
+
# 2. Korea ๊ฒ์ ๊ฒฐ๊ณผ ๊ฐ์ ธ์ค๊ธฐ
|
344 |
+
korea_params = {
|
345 |
+
"search": "korea",
|
346 |
+
"full": "True",
|
347 |
+
"config": "True",
|
348 |
+
"limit": limit
|
349 |
+
}
|
350 |
+
|
351 |
+
korea_response = requests.get(
|
352 |
+
"https://huggingface.co/api/models",
|
353 |
+
headers={'Accept': 'application/json'},
|
354 |
+
params=korea_params
|
355 |
+
)
|
356 |
+
|
357 |
+
if korea_response.status_code == 200:
|
358 |
+
korea_models = korea_response.json()
|
359 |
+
print(f"Fetched {len(korea_models)} Korea-related models")
|
360 |
+
|
361 |
+
# ์ค๋ณต ์ ๊ฑฐํ๋ฉด์ Korea ๋ชจ๋ธ ์ถ๊ฐ
|
362 |
+
existing_ids = {model.get('id', '') for model in all_models}
|
363 |
+
for model in korea_models:
|
364 |
+
if model.get('id', '') not in existing_ids:
|
365 |
+
all_models.append(model)
|
366 |
+
existing_ids.add(model.get('id', ''))
|
367 |
+
|
368 |
+
# 3. Korean ๊ฒ์ ๊ฒฐ๊ณผ ๊ฐ์ ธ์ค๊ธฐ
|
369 |
+
korean_params = {
|
370 |
+
"search": "korean",
|
371 |
+
"full": "True",
|
372 |
+
"config": "True",
|
373 |
+
"limit": limit
|
374 |
+
}
|
375 |
+
|
376 |
+
korean_response = requests.get(
|
377 |
+
"https://huggingface.co/api/models",
|
378 |
+
headers={'Accept': 'application/json'},
|
379 |
+
params=korean_params
|
380 |
+
)
|
381 |
+
|
382 |
+
if korean_response.status_code == 200:
|
383 |
+
korean_models = korean_response.json()
|
384 |
+
print(f"Fetched {len(korean_models)} Korean-related models")
|
385 |
+
|
386 |
+
# ์ค๋ณต ์ ๊ฑฐํ๋ฉด์ Korean ๋ชจ๋ธ ์ถ๊ฐ
|
387 |
+
for model in korean_models:
|
388 |
+
if model.get('id', '') not in existing_ids:
|
389 |
+
all_models.append(model)
|
390 |
+
existing_ids.add(model.get('id', ''))
|
391 |
+
|
392 |
+
print(f"Total unique models: {len(all_models)}")
|
393 |
+
return all_models[:limit]
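The three listing calls in get_all_models share one pattern: query https://huggingface.co/api/models with a different search term each time and merge the results, de-duplicating on id. A minimal standalone sketch of that pattern (the helper names fetch_model_list and merged_models are illustrative, not part of this commit):

    import requests

    API_URL = "https://huggingface.co/api/models"

    def fetch_model_list(search=None, limit=1000):
        # One listing call against the public models API.
        params = {"limit": limit, "full": "True", "config": "True"}
        if search:
            params["search"] = search
        resp = requests.get(API_URL, headers={"Accept": "application/json"}, params=params)
        return resp.json() if resp.status_code == 200 else []

    def merged_models(limit=1000):
        # General list first, then the "korea"/"korean" searches, de-duplicated by id.
        merged, seen = [], set()
        for term in (None, "korea", "korean"):
            for model in fetch_model_list(term, limit):
                model_id = model.get("id", "")
                if model_id and model_id not in seen:
                    merged.append(model)
                    seen.add(model_id)
        return merged[:limit]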
+def get_models_data(progress=gr.Progress()):
+    """Fetch the model data"""
+    try:
+        progress(0, desc="Fetching models...")
+
+        # Fetch the models
+        all_global_models = get_all_models(limit=1000)
+        print(f"Actually fetched models count: {len(all_global_models)}")
+
+        # Build a rank map, using the API response order as the rank
+        rank_map = {}
+        for rank, model in enumerate(all_global_models, 1):
+            model_id = model.get('id', '').strip()
+            rank_map[model_id] = {
+                'rank': rank,
                 'likes': model.get('likes', 0),
+                'downloads': model.get('downloads', 0),
                 'title': model.get('title', 'No Title')
             }
+            print(f"Rank {rank}: {model_id}")
+
+        # Check each target model's rank and collect its info
         filtered_models = []
         for model_id in target_models.keys():
             try:
+                # Call the per-model API
+                normalized_id = model_id.strip('/')
+                model_url_api = f"https://huggingface.co/api/models/{normalized_id}"
+                response = requests.get(
+                    model_url_api,
+                    headers={'Accept': 'application/json'}
+                )

                 if response.status_code == 200:
                     model_data = response.json()
+                    api_id = model_data.get('id', '').strip()
+
+                    # Look up the rank from the API response order
+                    rank_info = rank_map.get(api_id)

                     model_info = {
                         'id': model_id,
+                        'global_rank': rank_info['rank'] if rank_info else 'Not in top 1000',
                         'downloads': model_data.get('downloads', 0),
                         'likes': model_data.get('likes', 0),
                         'title': model_data.get('title', 'No Title')
                     }
                     filtered_models.append(model_info)
+                    print(f"Model {model_id}: Rank={model_info['global_rank']}, Downloads={model_info['downloads']}, Likes={model_info['likes']}")
                 else:
                     filtered_models.append({
                         'id': model_id,
+                        'global_rank': 'Not in top 1000',
                         'downloads': 0,
                         'likes': 0,
                         'title': 'No Title'
                     })
             except Exception as e:
+                print(f"Error processing {model_id}: {str(e)}")
                 filtered_models.append({
                     'id': model_id,
+                    'global_rank': 'Not in top 1000',
                     'downloads': 0,
                     'likes': 0,
                     'title': 'No Title'
         # Sort by rank
         filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])

         progress(0.3, desc="Creating visualization...")

         # Create the visualization
         fig = go.Figure()

+        # Visualize only the models that fall inside the ranking
+        valid_models = [m for m in filtered_models if isinstance(m['global_rank'], (int, float))]
+
+        if valid_models:
+            ids = [m['id'] for m in valid_models]
+            ranks = [m['global_rank'] for m in valid_models]
+            likes = [m['likes'] for m in valid_models]
+            downloads = [m['downloads'] for m in valid_models]

+            # Invert the Y-axis values (so that better ranks sit higher)
+            y_values = [1001 - r for r in ranks]

             fig.add_trace(go.Bar(
+                x=ids,
                 y=y_values,
+                text=[f"Global Rank: #{r}<br>Downloads: {format(d, ',')}<br>Likes: {format(l, ',')}"
+                      for r, d, l in zip(ranks, downloads, likes)],
                 textposition='auto',
                 marker_color='rgb(158,202,225)',
                 opacity=0.8
         fig.update_layout(
             title={
+                'text': 'Hugging Face Models Global Rankings',
                 'y':0.95,
                 'x':0.5,
                 'xanchor': 'center',
             xaxis_title='Model ID',
             yaxis_title='Global Rank',
             yaxis=dict(
+                ticktext=[f"#{i}" for i in range(1, 1001, 50)],
+                tickvals=[1001 - i for i in range(1, 1001, 50)],
+                range=[0, 1000]
             ),
             height=800,
             showlegend=False,
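The models chart encodes rank as bar height by plotting 1001 - rank and then relabelling the axis with ticktext/tickvals so the tallest bar still reads as #1. A self-contained sketch of the same trick on made-up data (the model names and ranks below are illustrative):

    import plotly.graph_objects as go

    models = ["model-a", "model-b", "model-c"]   # hypothetical IDs
    ranks = [3, 120, 850]                        # hypothetical global ranks (1 is best)

    fig = go.Figure(go.Bar(
        x=models,
        y=[1001 - r for r in ranks],             # invert so better ranks give taller bars
        text=[f"Global Rank: #{r}" for r in ranks],
        textposition="auto",
    ))
    fig.update_layout(yaxis=dict(
        ticktext=[f"#{i}" for i in range(1, 1001, 50)],    # label with the real rank
        tickvals=[1001 - i for i in range(1, 1001, 50)],   # placed at the inverted value
        range=[0, 1000],
    ))
    fig.show()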
         # Create the HTML cards
         html_content = """
         <div style='padding: 20px; background: #f5f5f5;'>
+            <h2 style='color: #2c3e50;'>Models Global Rankings</h2>
             <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
         """

         for model in filtered_models:
+            rank_display = f"Global Rank #{model['global_rank']}" if isinstance(model['global_rank'], (int, float)) else "Not in top 1000"

             html_content += f"""
             <div style='
                 box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                 transition: transform 0.2s;
             '>
+                <h3 style='color: #34495e;'>{rank_display}</h3>
+                <h4 style='color: #2c3e50;'>{model['id']}</h4>
+                <p style='color: #7f8c8d;'>⬇️ Downloads: {format(model['downloads'], ',')}</p>
+                <p style='color: #7f8c8d;'>👍 Likes: {format(model['likes'], ',')}</p>
+                <a href='{target_models[model['id']]}'
                    target='_blank'
                    style='
                        display: inline-block;
         html_content += "</div></div>"

         # Build the dataframe
+        df = pd.DataFrame([{
+            'Global Rank': f"#{m['global_rank']}" if isinstance(m['global_rank'], (int, float)) else m['global_rank'],
+            'Model ID': m['id'],
+            'Title': m['title'],
+            'Downloads': format(m['downloads'], ','),
+            'Likes': format(m['likes'], ','),
+            'URL': target_models[m['id']]
+        } for m in filtered_models])

         progress(1.0, desc="Complete!")
         return fig, html_content, df
     except Exception as e:
         print(f"Error in get_models_data: {str(e)}")
         return create_error_plot(), f"<div>Error: {str(e)}</div>", pd.DataFrame()
+

 # Spaces of interest (URLs and info)
 target_spaces = {
url = "https://huggingface.co/api/spaces"
|
622 |
params = {
|
623 |
'full': 'true',
|
624 |
+
'limit': 400
|
625 |
}
|
626 |
|
627 |
if sort_type == "modes":
|
|
|
655 |
     likes = [space.get('likes', 0) for space in spaces]
     titles = [space.get('cardData', {}).get('title') or space.get('title', 'No Title') for space in spaces]

+    # Create the bar chart (each bar is drawn with a length from its rank to 400)
     fig.add_trace(go.Bar(
         x=ids,
+        y=ranks,   # use the actual rank
+        base=400,  # anchor the bars at 400
         text=[f"Rank: {r}<br>Title: {t}<br>Likes: {l}"
               for r, t, l in zip(ranks, titles, likes)],
         textposition='auto',
     fig.update_layout(
         title={
+            'text': f'Hugging Face Spaces {sort_type.title()} Rankings (Top 400)',
             'y':0.95,
             'x':0.5,
             'xanchor': 'center',
         xaxis_title='Space ID',
         yaxis_title='Rank',
         yaxis=dict(
+            autorange='reversed',  # invert the Y-axis
+            tickmode='linear',
+            tick0=0,
+            dtick=20,
+            range=[0, 400],  # Y-axis range
         ),
         height=800,
         showlegend=False,
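The Spaces chart takes a different route from the models chart: it keeps the raw rank as the bar value, anchors every bar at base=400, and flips the axis with autorange='reversed' so rank 1 ends up at the top. A minimal sketch of just that axis treatment, again on made-up data:

    import plotly.graph_objects as go

    spaces = ["space-a", "space-b", "space-c"]   # hypothetical IDs
    ranks = [2, 57, 310]                         # hypothetical ranks (1 is best)

    fig = go.Figure(go.Bar(
        x=spaces,
        y=ranks,       # plot the raw rank ...
        base=400,      # ... measured from a common baseline of 400
        text=[f"Rank: {r}" for r in ranks],
        textposition="auto",
    ))
    fig.update_layout(yaxis=dict(
        autorange="reversed",   # smaller (better) ranks are drawn at the top
        tickmode="linear",
        tick0=0,
        dtick=20,
    ))
    fig.show()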
     title = space.get('cardData', {}).get('title') or space.get('title', 'No Title')
     likes = space.get('likes', 0)

     html_content += f"""
     <div style='
         background: white;
         </a>
     </div>
     """

     html_content += "</div></div>"

     params = {
         'sort': 'likes',
         'direction': -1,
+        'limit': 400,
         'full': 'true'
     }
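For reference, this token-free fallback is just the public Spaces listing sorted by likes; the same request can be issued standalone (no authentication header) to sanity-check the parameters:

    import requests

    resp = requests.get(
        "https://huggingface.co/api/spaces",
        headers={"Accept": "application/json"},
        params={"sort": "likes", "direction": -1, "limit": 400, "full": "true"},
    )
    if resp.status_code == 200:
        for i, space in enumerate(resp.json()[:5], 1):
            print(i, space.get("id"), space.get("likes", 0))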
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
     gr.Markdown("""
     # 🤗 Hugging Face 'Korea (Language) Leaderboard'
+    Reflects the real-time popularity rankings of the Spaces and Models served by Hugging Face; the list is refreshed with reference to what Koreans (individuals and companies) have released publicly, the Korean 'LLM Leaderboard', TAG, and similar sources. To request a new entry: [email protected]
     """)

     # Move the refresh button to the top and give it a Korean label