openfree committed (verified)
Commit 5f52c69 · 1 Parent(s): 1641e84

Update app.py

Files changed (1)
  1. app.py +59 -58
app.py CHANGED
@@ -301,74 +301,75 @@ def get_models_data(progress=gr.Progress()):
     try:
         progress(0, desc="Fetching global rankings...")

-        # Fetch the full model list (sorted by downloads)
-        global_url = "https://huggingface.co/api/models"
-        global_params = {
-            'full': 'true',
-            'limit': 10000,  # set to a sufficiently large number
-            'sort': 'downloads',
-            'direction': -1
-        }
-
-        global_response = requests.get(global_url, headers={'Accept': 'application/json'}, params=global_params)
-        if global_response.status_code != 200:
-            print(f"Failed to fetch global rankings: {global_response.status_code}")
-            return create_error_plot(), "<div>Failed to fetch global ranking data.</div>", pd.DataFrame()
-
-        global_models = global_response.json()
+        # Fetch the full model list in multiple pages
+        all_global_models = []
+        page_size = 10000
+        max_models = 50000  # set to a sufficiently large number
+
+        for offset in range(0, max_models, page_size):
+            global_params = {
+                'full': 'true',
+                'limit': page_size,
+                'offset': offset,
+                'sort': 'downloads',
+                'direction': -1
+            }
+
+            global_response = requests.get(
+                "https://huggingface.co/api/models",
+                headers={'Accept': 'application/json'},
+                params=global_params
+            )
+
+            if global_response.status_code != 200:
+                print(f"Failed to fetch global rankings page {offset//page_size + 1}")
+                continue
+
+            page_models = global_response.json()
+            if not page_models:  # stop when no more models are returned
+                break
+
+            all_global_models.extend(page_models)
+            print(f"Fetched {len(all_global_models)} models so far...")

         # Build the global rank map
-        global_ranks = {normalize_model_id(model['id']): idx + 1
-                        for idx, model in enumerate(global_models)}
+        global_ranks = {}
+        for idx, model in enumerate(all_global_models, 1):
+            model_id = normalize_model_id(model.get('id', ''))
+            global_ranks[model_id] = {
+                'rank': idx,
+                'downloads': model.get('downloads', 0),
+                'likes': model.get('likes', 0)
+            }

-        # Fetch detailed information for each model individually
-        filtered_models = []
-        total_models = len(target_models)
+        print(f"Total models fetched: {len(global_ranks)}")

-        for idx, (model_id, model_url) in enumerate(target_models.items()):
-            progress((idx + 1) / total_models, desc=f"Fetching model {idx + 1}/{total_models}...")
-
-            try:
-                # Call the individual model API
-                model_url_api = f"https://huggingface.co/api/models/{model_id}"
-                response = requests.get(model_url_api, headers={'Accept': 'application/json'})
-
-                if response.status_code == 200:
-                    model_data = response.json()
-                    normalized_id = normalize_model_id(model_id)
-                    global_rank = global_ranks.get(normalized_id, 'Not in top 10000')
-
-                    model_info = {
-                        'id': model_id,
-                        'downloads': model_data.get('downloads', 0),
-                        'likes': model_data.get('likes', 0),
-                        'title': model_data.get('title', 'No Title'),
-                        'global_rank': global_rank
-                    }
-                    filtered_models.append(model_info)
-                    print(f"Model {model_id}: Global Rank={global_rank}, Downloads={model_info['downloads']}, Likes={model_info['likes']}")
-                else:
-                    print(f"Failed to fetch data for {model_id}: {response.status_code}")
-                    model_info = {
-                        'id': model_id,
-                        'downloads': 0,
-                        'likes': 0,
-                        'title': 'No Title',
-                        'global_rank': 'Not in top 10000'
-                    }
-                    filtered_models.append(model_info)
-            except Exception as e:
-                print(f"Error fetching data for {model_id}: {str(e)}")
+        # Check and store the rank of each target model
+        filtered_models = []
+        for model_id in target_models.keys():
+            normalized_id = normalize_model_id(model_id)
+            if normalized_id in global_ranks:
+                rank_info = global_ranks[normalized_id]
+                model_info = {
+                    'id': model_id,
+                    'global_rank': rank_info['rank'],
+                    'downloads': rank_info['downloads'],
+                    'likes': rank_info['likes'],
+                    'title': 'No Title'  # can be fetched via a per-model API call if needed
+                }
+                print(f"Found {model_id} at rank {rank_info['rank']}")
+            else:
                 model_info = {
                     'id': model_id,
+                    'global_rank': 'Not found',
                     'downloads': 0,
                     'likes': 0,
-                    'title': 'No Title',
-                    'global_rank': 'Not in top 10000'
+                    'title': 'No Title'
                 }
-                filtered_models.append(model_info)
+                print(f"Model {model_id} not found in rankings")
+            filtered_models.append(model_info)

-        # Sort by global rank
+        # Sort by rank
         filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])

         if not filtered_models:
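
For context, the new code replaces the old per-model API calls with a single downloads-ordered ranking map that each target model is then looked up in. Below is a minimal standalone sketch of that idea using the huggingface_hub client (recent versions), which pages through the Hub's /api/models listing internally rather than through an offset parameter; the helper names, the .lower() stand-in for app.py's normalize_model_id, and the example model id are illustrative assumptions, not part of this commit.

from huggingface_hub import list_models


def build_rank_map(max_models=50_000):
    """Map lowercased model id -> global rank, downloads, likes, ordered by downloads."""
    rank_map = {}
    # list_models handles pagination of the Hub listing internally
    models = list_models(sort="downloads", direction=-1, limit=max_models)
    for rank, m in enumerate(models, start=1):
        rank_map[m.id.lower()] = {
            "rank": rank,
            "downloads": getattr(m, "downloads", 0) or 0,
            "likes": getattr(m, "likes", 0) or 0,
        }
    return rank_map


def lookup_targets(target_ids, rank_map):
    """Collect rank info for each target id; unranked ids sort last, as in app.py."""
    rows = []
    for model_id in target_ids:
        info = rank_map.get(model_id.lower())
        rows.append({
            "id": model_id,
            "global_rank": info["rank"] if info else "Not found",
            "downloads": info["downloads"] if info else 0,
            "likes": info["likes"] if info else 0,
        })
    rows.sort(key=lambda r: float("inf") if isinstance(r["global_rank"], str) else r["global_rank"])
    return rows


if __name__ == "__main__":
    ranks = build_rank_map(max_models=20_000)  # smaller window for a quick test
    print(lookup_targets(["openai-community/gpt2"], ranks))  # hypothetical target id

One practical caveat: a rank computed this way only covers the first max_models entries of the listing, so any target outside that window is reported as "Not found", just like the 'Not in top 10000' case in the previous version of app.py.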