echarlaix (HF Staff) committed
Commit 9a0c2c8 · 1 Parent(s): 19d116b

remove need to check whether the model needs to be exported

Files changed (1)
1. app.py +0 -10
app.py CHANGED
@@ -10,8 +10,6 @@ from tempfile import TemporaryDirectory
 
 from huggingface_hub.file_download import repo_folder_name
 from optimum.exporters import TasksManager
-
-from optimum.intel.utils.modeling_utils import _find_files_matching_pattern
 from optimum.intel import (
     OVModelForAudioClassification,
     OVModelForCausalLM,
@@ -85,13 +83,6 @@ def quantize_model(
         return f"The task '{task}' is not supported, only {_HEAD_TO_AUTOMODELS.keys()} tasks are supported"
 
     auto_model_class = _HEAD_TO_AUTOMODELS[task]
-    ov_files = _find_files_matching_pattern(
-        model_id,
-        pattern=r"(.*)?openvino(.*)?\_model.xml",
-        use_auth_token=oauth_token.token,
-    )
-    export = len(ov_files) == 0
-
     if calibration_dataset == "None":
         calibration_dataset = None
 
@@ -125,7 +116,6 @@ def quantize_model(
         api.snapshot_download(repo_id=model_id, local_dir=folder, allow_patterns=["*.json"])
         ov_model = eval(auto_model_class).from_pretrained(
             model_id,
-            export=export,
             cache_dir=folder,
             token=oauth_token.token,
             quantization_config=quantization_config
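
For context, here is a minimal sketch of what the simplified loading call looks like after this change, assuming a recent optimum-intel release in which from_pretrained itself checks for an existing OpenVINO IR (openvino_model.xml) and runs the export when none is found. The model id and quantization settings below are illustrative placeholders, not values taken from the Space:

from optimum.intel import OVModelForCausalLM, OVWeightQuantizationConfig

# Illustrative 4-bit weight-only quantization config (placeholder settings).
quantization_config = OVWeightQuantizationConfig(bits=4)

# No explicit `export=` argument: the library detects whether the checkpoint
# already contains an OpenVINO IR and converts it on the fly otherwise.
ov_model = OVModelForCausalLM.from_pretrained(
    "gpt2",  # placeholder model id
    quantization_config=quantization_config,
)
ov_model.save_pretrained("gpt2-openvino-int4")  # placeholder output path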
 