katuni4ka and echarlaix (HF Staff) committed
Commit 66e32bb (verified) · 1 parent: a3e2f44

upgrade-optimum-intel (#2)

- upgrade optimum-intel (19d116b68e22ead3071db5bdb289979cdd900cb4)
- remove the need to check whether the model needs to be exported (9a0c2c811616089fd31e11d1730fcbdc9e143628)


Co-authored-by: Ella Charlaix <[email protected]>

Files changed (2):
  1. app.py +0 -10
  2. requirements.txt +1 -5
app.py CHANGED
@@ -10,8 +10,6 @@ from tempfile import TemporaryDirectory
 
 from huggingface_hub.file_download import repo_folder_name
 from optimum.exporters import TasksManager
-
-from optimum.intel.utils.modeling_utils import _find_files_matching_pattern
 from optimum.intel import (
     OVModelForAudioClassification,
     OVModelForCausalLM,
@@ -85,13 +83,6 @@ def quantize_model(
         return f"The task '{task}' is not supported, only {_HEAD_TO_AUTOMODELS.keys()} tasks are supported"
 
     auto_model_class = _HEAD_TO_AUTOMODELS[task]
-    ov_files = _find_files_matching_pattern(
-        model_id,
-        pattern=r"(.*)?openvino(.*)?\_model.xml",
-        use_auth_token=oauth_token.token,
-    )
-    export = len(ov_files) == 0
-
     if calibration_dataset == "None":
         calibration_dataset = None
 
@@ -125,7 +116,6 @@ def quantize_model(
     api.snapshot_download(repo_id=model_id, local_dir=folder, allow_patterns=["*.json"])
     ov_model = eval(auto_model_class).from_pretrained(
         model_id,
-        export=export,
         cache_dir=folder,
         token=oauth_token.token,
         quantization_config=quantization_config
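Note: the deleted block used _find_files_matching_pattern to decide whether the checkpoint still had to be exported to OpenVINO IR and then passed that decision as export= to from_pretrained. In the pinned optimum-intel release that detection happens inside from_pretrained itself, so loading reduces to something like the sketch below (an illustration, not the Space's exact code; the "gpt2" id and the load_in_8bit shortcut stand in for the user-selected model_id and the Space's quantization_config):

from optimum.intel import OVModelForCausalLM

# Recent optimum-intel checks whether openvino_model.xml files already exist in the
# repository and converts the checkpoint to OpenVINO IR automatically when they do not,
# so no explicit export flag or file-pattern check is needed here.
ov_model = OVModelForCausalLM.from_pretrained(
    "gpt2",              # example checkpoint; the Space passes the user-selected model_id
    load_in_8bit=True,   # stand-in for the Space's quantization_config
)
ov_model.save_pretrained("gpt2-openvino")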
 
requirements.txt CHANGED
@@ -1,8 +1,4 @@
 huggingface_hub==0.23.4
 gradio[oauth]>=4.37.2
 gradio_huggingfacehub_search==0.0.6
-transformers==4.42.4
-diffusers==0.29.1
-openvino
-nncf
-git+https://github.com/huggingface/optimum-intel.git@automatic-export
+optimum-intel[openvino,diffusers]==1.23
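Note: the single optimum-intel[openvino,diffusers]==1.23 pin replaces the explicit pins and the git-branch install because the released package declares transformers as a dependency and pulls openvino, nncf and diffusers through its extras. A quick sanity check of the resolved environment (a sketch, assuming it is run inside the Space's virtualenv) could look like:

from importlib.metadata import version

# Print the versions that the single optimum-intel pin resolved for its dependencies.
for pkg in ("optimum-intel", "transformers", "diffusers", "openvino", "nncf"):
    print(pkg, version(pkg))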