Update app.py
app.py
CHANGED
@@ -301,74 +301,75 @@ def get_models_data(progress=gr.Progress()):
     try:
         progress(0, desc="Fetching global rankings...")
 
-        # All models
+        # Fetch the full model list in several pages
+        all_global_models = []
+        page_size = 10000
+        max_models = 50000  # set to a sufficiently large number
+
+        for offset in range(0, max_models, page_size):
+            global_params = {
+                'full': 'true',
+                'limit': page_size,
+                'offset': offset,
+                'sort': 'downloads',
+                'direction': -1
+            }
+
+            global_response = requests.get(
+                "https://huggingface.co/api/models",
+                headers={'Accept': 'application/json'},
+                params=global_params
+            )
+
+            if global_response.status_code != 200:
+                print(f"Failed to fetch global rankings page {offset//page_size + 1}")
+                continue
+
+            page_models = global_response.json()
+            if not page_models:  # stop when no more models are returned
+                break
+
+            all_global_models.extend(page_models)
+            print(f"Fetched {len(all_global_models)} models so far...")
 
         # Build the global rank map
-        global_ranks = {
+        global_ranks = {}
+        for idx, model in enumerate(all_global_models, 1):
+            model_id = normalize_model_id(model.get('id', ''))
+            global_ranks[model_id] = {
+                'rank': idx,
+                'downloads': model.get('downloads', 0),
+                'likes': model.get('likes', 0)
+            }
 
-        filtered_models = []
-        total_models = len(target_models)
+        print(f"Total models fetched: {len(global_ranks)}")
 
-                    'downloads': model_data.get('downloads', 0),
-                    'likes': model_data.get('likes', 0),
-                    'title': model_data.get('title', 'No Title'),
-                    'global_rank': global_rank
-                }
-                filtered_models.append(model_info)
-                print(f"Model {model_id}: Global Rank={global_rank}, Downloads={model_info['downloads']}, Likes={model_info['likes']}")
-            else:
-                print(f"Failed to fetch data for {model_id}: {response.status_code}")
-                model_info = {
-                    'id': model_id,
-                    'downloads': 0,
-                    'likes': 0,
-                    'title': 'No Title',
-                    'global_rank': 'Not in top 10000'
-                }
-                filtered_models.append(model_info)
-        except Exception as e:
-            print(f"Error fetching data for {model_id}: {str(e)}")
+        # Look up and store the rank of each target model
+        filtered_models = []
+        for model_id in target_models.keys():
+            normalized_id = normalize_model_id(model_id)
+            if normalized_id in global_ranks:
+                rank_info = global_ranks[normalized_id]
+                model_info = {
+                    'id': model_id,
+                    'global_rank': rank_info['rank'],
+                    'downloads': rank_info['downloads'],
+                    'likes': rank_info['likes'],
+                    'title': 'No Title'  # can be fetched with a separate API call if needed
+                }
+                print(f"Found {model_id} at rank {rank_info['rank']}")
+            else:
                 model_info = {
                     'id': model_id,
+                    'global_rank': 'Not found',
                     'downloads': 0,
                     'likes': 0,
-                    'title': 'No Title',
-                    'global_rank': 'Not in top 10000'
+                    'title': 'No Title'
                 }
+                print(f"Model {model_id} not found in rankings")
+            filtered_models.append(model_info)
 
-        # Sort
+        # Sort by rank
         filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])
 
         if not filtered_models:
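The added code assumes a normalize_model_id() helper defined elsewhere in app.py; its actual definition is not part of this hunk. A minimal sketch of what such a helper might look like, as an illustration only:

# Hypothetical sketch -- the real normalize_model_id() in app.py may differ.
def normalize_model_id(model_id: str) -> str:
    # Compare IDs case-insensitively and ignore surrounding whitespace,
    # so "Org/Model" and " org/model " map to the same lookup key.
    return model_id.strip().lower()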
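For reference, the sort key kept at the end of the hunk pushes models whose global_rank is a string (e.g. 'Not found') to the end of the list. A small standalone example of that behavior:

rows = [
    {'id': 'a', 'global_rank': 'Not found'},
    {'id': 'b', 'global_rank': 3},
    {'id': 'c', 'global_rank': 1},
]
# String ranks sort as infinity, so numeric ranks come first.
rows.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])
print([r['id'] for r in rows])  # ['c', 'b', 'a']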