Commit 5491f2d · updated GPTQ display!
Committed by Clémentine
1 Parent(s): ed118a6

Files changed:
- app.py (+9 -9)
- model_info_cache.pkl (+2 -2)
- model_size_cache.pkl (+2 -2)
- src/display_models/get_model_metadata.py (+4 -2)
- src/display_models/read_results.py (+2 -0)
app.py CHANGED
@@ -253,14 +253,14 @@ def select_columns(df: pd.DataFrame, columns: list) -> pd.DataFrame:
 NUMERIC_INTERVALS = {
-    "
-    "0~1.
-    "1.5~
-    "3~
-    "7~
-    "13~
-    "35~
-    "
+    "?": pd.Interval(-1, 0, closed="right"),
+    "0~1.5": pd.Interval(0, 1.5, closed="right"),
+    "1.5~3": pd.Interval(1.5, 3, closed="right"),
+    "3~7": pd.Interval(3, 7, closed="right"),
+    "7~13": pd.Interval(7, 13, closed="right"),
+    "13~35": pd.Interval(13, 35, closed="right"),
+    "35~60": pd.Interval(35, 60, closed="right"),
+    "60+": pd.Interval(60, 10000, closed="right"),
 }

@@ -378,7 +378,7 @@ with demo:
         elem_id="filter-columns-precision",
     )
     filter_columns_size = gr.CheckboxGroup(
-        label="Model sizes",
+        label="Model sizes (in billions of parameters)",
         choices=list(NUMERIC_INTERVALS.keys()),
         value=list(NUMERIC_INTERVALS.keys()),
         interactive=True,
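Note on the new NUMERIC_INTERVALS: each bucket is a right-closed pandas Interval over parameter counts in billions, and the "?" bucket (-1, 0] presumably catches models whose size is unknown and recorded as 0. Below is a minimal sketch of how a size could be bucketed against these intervals; size_bucket is a hypothetical helper for illustration, not code from app.py.

import pandas as pd

NUMERIC_INTERVALS = {
    "?": pd.Interval(-1, 0, closed="right"),
    "0~1.5": pd.Interval(0, 1.5, closed="right"),
    "1.5~3": pd.Interval(1.5, 3, closed="right"),
    "3~7": pd.Interval(3, 7, closed="right"),
    "7~13": pd.Interval(7, 13, closed="right"),
    "13~35": pd.Interval(13, 35, closed="right"),
    "35~60": pd.Interval(35, 60, closed="right"),
    "60+": pd.Interval(60, 10000, closed="right"),
}

def size_bucket(params_in_billions: float) -> str:
    # Return the label of the first interval containing the value.
    for label, interval in NUMERIC_INTERVALS.items():
        if params_in_billions in interval:
            return label
    return "?"

print(size_bucket(6.7))  # 3~7
print(size_bucket(70))   # 60+
print(size_bucket(0))    # ?  (unknown-size sentinel)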
model_info_cache.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f98e18ea0fcf11737e108f966f6d8b09120c6a2c231b70e9e242e9bba3145a47
+size 3780284
model_size_cache.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ace7167a258f711fa7ffeaadddc6ebef8ccb92651dce8b805228c2f18c988958
+size 75324
src/display_models/get_model_metadata.py CHANGED
@@ -45,13 +45,15 @@ def get_model_infos_from_hub(leaderboard_data: List[dict]):
         model_data[AutoEvalColumn.license.name] = None
         model_data[AutoEvalColumn.likes.name] = None
         if model_name not in model_size_cache:
-            model_size_cache[model_name] = get_model_size(model_name, None)
+            size_factor = 8 if model_data["Precision"] == "GPTQ" else 1
+            model_size_cache[model_name] = size_factor * get_model_size(model_name, None)
         model_data[AutoEvalColumn.params.name] = model_size_cache[model_name]

         model_data[AutoEvalColumn.license.name] = get_model_license(model_info)
         model_data[AutoEvalColumn.likes.name] = get_model_likes(model_info)
         if model_name not in model_size_cache:
-            model_size_cache[model_name] = get_model_size(model_name, model_info)
+            size_factor = 8 if model_data["Precision"] == "GPTQ" else 1
+            model_size_cache[model_name] = size_factor * get_model_size(model_name, model_info)
         model_data[AutoEvalColumn.params.name] = model_size_cache[model_name]

         # save cache to disk in pickle format
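The change above multiplies the cached size by 8 for GPTQ models. A plausible reading (not stated in the commit itself): get_model_size derives the parameter count from the checkpoint's file size, and GPTQ checkpoints store 4-bit weights, so a byte-based estimate comes out roughly 8x too small compared with a 32-bit layout. A standalone sketch of that correction follows; corrected_size is a hypothetical helper and raw_size stands in for whatever get_model_size returns.

def corrected_size(model_data: dict, raw_size: float) -> float:
    # Apply the same fixed factor as the commit: GPTQ sizes are scaled by 8,
    # everything else is left untouched. model_data["Precision"] mirrors the
    # leaderboard's precision column; raw_size replaces the real get_model_size().
    size_factor = 8 if model_data["Precision"] == "GPTQ" else 1
    return size_factor * raw_size

print(corrected_size({"Precision": "GPTQ"}, 0.9))     # 7.2
print(corrected_size({"Precision": "float16"}, 7.0))  # 7.0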
src/display_models/read_results.py CHANGED
@@ -81,6 +81,8 @@ def parse_eval_result(json_filepath: str) -> Tuple[str, list[dict]]:
     model_split = model.split("/", 1)

     precision = config.get("model_dtype")
+    if precision == "None":
+        precision = "GPTQ"

     model = model_split[-1]
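The read_results.py change remaps a literal "None" dtype to "GPTQ": the results JSON apparently records model_dtype as the string "None" for GPTQ runs, so the parser now labels those rows explicitly. A tiny sketch with a made-up config dict:

config = {"model_name": "org/some-gptq-model", "model_dtype": "None"}

precision = config.get("model_dtype")
if precision == "None":
    precision = "GPTQ"

print(precision)  # GPTQ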