Commit c98e092

Remove huggingface_hub
1 parent 1138ff9 commit c98e092

File tree: 1 file changed (+20, -23 lines)

optimum/exporters/openvino/__main__.py

@@ -247,26 +247,39 @@ class StoreAttr(object):
 
         GPTQQuantizer.post_init_model = post_init_model
 
+    model = TasksManager.get_model_from_task(
+        task,
+        model_name_or_path,
+        subfolder=subfolder,
+        revision=revision,
+        cache_dir=cache_dir,
+        use_auth_token=use_auth_token,
+        local_files_only=local_files_only,
+        force_download=force_download,
+        trust_remote_code=trust_remote_code,
+        framework=framework,
+        device=device,
+        library_name=library_name,
+        **loading_kwargs,
+    )
+
     # Apply quantization in hybrid mode to Stable Diffusion before export
     if (
         library_name == "diffusers"
         and ov_config
         and ov_config.quantization_config
         and ov_config.quantization_config.get("dataset", None)
     ):
-        import huggingface_hub
-
-        model_info = huggingface_hub.model_info(model_name_or_path, revision=revision)
-        class_name = model_info.config["diffusers"]["_class_name"]
-        if class_name == "LatentConsistencyModelPipeline":
+        class_name = model.__class__.__name__
+        if "LatentConsistencyModelPipeline" in class_name:
             from optimum.intel import OVLatentConsistencyModelPipeline
 
             model_cls = OVLatentConsistencyModelPipeline
-        elif class_name == "StableDiffusionXLPipeline":
+        elif "StableDiffusionXLPipeline" in class_name:
             from optimum.intel import OVStableDiffusionXLPipeline
 
             model_cls = OVStableDiffusionXLPipeline
-        elif class_name == "StableDiffusionPipeline":
+        elif "StableDiffusionPipeline" in class_name:
             from optimum.intel import OVStableDiffusionPipeline
 
             model_cls = OVStableDiffusionPipeline
@@ -286,22 +299,6 @@ class StoreAttr(object):
         model.save_pretrained(output)
         return
 
-    model = TasksManager.get_model_from_task(
-        task,
-        model_name_or_path,
-        subfolder=subfolder,
-        revision=revision,
-        cache_dir=cache_dir,
-        use_auth_token=use_auth_token,
-        local_files_only=local_files_only,
-        force_download=force_download,
-        trust_remote_code=trust_remote_code,
-        framework=framework,
-        device=device,
-        library_name=library_name,
-        **loading_kwargs,
-    )
-
     needs_pad_token_id = task == "text-classification" and getattr(model.config, "pad_token_id", None) is None
 
     if needs_pad_token_id:
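In effect, the model is now loaded via TasksManager.get_model_from_task before the hybrid-quantization branch, and that branch dispatches on the class name of the already-loaded diffusers pipeline instead of issuing a separate huggingface_hub.model_info request. A minimal sketch of the new dispatch, using a hypothetical helper name (select_ov_pipeline_cls) and the pipeline classes named in the diff:

# Hypothetical helper illustrating the dispatch introduced by this commit;
# the function name is not part of the actual change.
def select_ov_pipeline_cls(model):
    class_name = model.__class__.__name__
    if "LatentConsistencyModelPipeline" in class_name:
        from optimum.intel import OVLatentConsistencyModelPipeline

        return OVLatentConsistencyModelPipeline
    elif "StableDiffusionXLPipeline" in class_name:
        from optimum.intel import OVStableDiffusionXLPipeline

        return OVStableDiffusionXLPipeline
    elif "StableDiffusionPipeline" in class_name:
        from optimum.intel import OVStableDiffusionPipeline

        return OVStableDiffusionPipeline
    # Assumption: pipelines outside these three are not handled by this branch.
    raise NotImplementedError(f"Hybrid quantization is not supported for {class_name}")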
