Skip to content

Commit c287fba

Browse files
Deprecate use_auth_token (#684)
* added token argument * updated docstrings * added deprecation warning * added more warnings and swapped token with use_auth_token * use FutureWarning instead * added a test to read from private repo
1 parent 920b237 commit c287fba

13 files changed

+315
-59
lines changed

optimum/exporters/openvino/__main__.py

+17-4
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
# limitations under the License.
1414

1515
import logging
16+
import warnings
1617
from pathlib import Path
1718
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union
1819

@@ -57,6 +58,7 @@ def main_export(
5758
force_download: bool = False,
5859
local_files_only: bool = False,
5960
use_auth_token: Optional[Union[bool, str]] = None,
61+
token: Optional[Union[bool, str]] = None,
6062
model_kwargs: Optional[Dict[str, Any]] = None,
6163
custom_export_configs: Optional[Dict[str, "OnnxConfig"]] = None,
6264
fn_get_submodels: Optional[Callable] = None,
@@ -107,9 +109,11 @@ def main_export(
107109
cached versions if they exist.
108110
local_files_only (`Optional[bool]`, defaults to `False`):
109111
Whether or not to only look at local files (i.e., do not try to download the model).
110-
use_auth_token (`Optional[str]`, defaults to `None`):
112+
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
113+
Deprecated. Please use `token` instead.
114+
token (Optional[Union[bool, str]], defaults to `None`):
111115
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
112-
when running `transformers-cli login` (stored in `~/.huggingface`).
116+
when running `huggingface-cli login` (stored in `~/.huggingface`).
113117
model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
114118
Experimental usage: keyword arguments to pass to the model during
115119
the export. This argument should be used along the `custom_export_configs` argument
@@ -138,6 +142,15 @@ def main_export(
138142
```
139143
"""
140144

145+
if use_auth_token is not None:
146+
warnings.warn(
147+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
148+
FutureWarning,
149+
)
150+
if token is not None:
151+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
152+
token = use_auth_token
153+
141154
if compression_option is not None:
142155
logger.warning(
143156
"The `compression_option` argument is deprecated and will be removed in optimum-intel v1.17.0. "
@@ -196,7 +209,7 @@ def main_export(
196209
subfolder=subfolder,
197210
revision=revision,
198211
cache_dir=cache_dir,
199-
use_auth_token=use_auth_token,
212+
token=token,
200213
local_files_only=local_files_only,
201214
force_download=force_download,
202215
trust_remote_code=trust_remote_code,
@@ -268,7 +281,7 @@ class StoreAttr(object):
268281
subfolder=subfolder,
269282
revision=revision,
270283
cache_dir=cache_dir,
271-
use_auth_token=use_auth_token,
284+
token=token,
272285
local_files_only=local_files_only,
273286
force_download=force_download,
274287
trust_remote_code=trust_remote_code,

optimum/intel/generation/modeling.py

+26-5
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import inspect
1616
import logging
1717
import os
18+
import warnings
1819
from pathlib import Path
1920
from tempfile import TemporaryDirectory
2021
from typing import Optional, Tuple, Union
@@ -363,15 +364,25 @@ def _from_pretrained(
363364
cls,
364365
model_id: Union[str, Path],
365366
config: PretrainedConfig,
366-
use_auth_token: Optional[Union[bool, str, None]] = None,
367-
revision: Optional[Union[str, None]] = None,
367+
use_auth_token: Optional[Union[bool, str]] = None,
368+
token: Optional[Union[bool, str]] = None,
369+
revision: Optional[str] = None,
368370
force_download: bool = False,
369371
cache_dir: str = HUGGINGFACE_HUB_CACHE,
370372
file_name: Optional[str] = WEIGHTS_NAME,
371373
local_files_only: bool = False,
372374
use_cache: bool = True,
373375
**kwargs,
374376
):
377+
if use_auth_token is not None:
378+
warnings.warn(
379+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
380+
FutureWarning,
381+
)
382+
if token is not None:
383+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
384+
token = use_auth_token
385+
375386
if not getattr(config, "torchscript", False):
376387
raise ValueError("`torchscript` should be set to True to load TorchScript model")
377388

@@ -385,7 +396,7 @@ def _from_pretrained(
385396
model_cache_path = hf_hub_download(
386397
repo_id=model_id,
387398
filename=file_name,
388-
use_auth_token=use_auth_token,
399+
token=token,
389400
revision=revision,
390401
cache_dir=cache_dir,
391402
force_download=force_download,
@@ -408,6 +419,7 @@ def _from_transformers(
408419
model_id: str,
409420
config: PretrainedConfig,
410421
use_auth_token: Optional[Union[bool, str]] = None,
422+
token: Optional[Union[bool, str]] = None,
411423
revision: Optional[str] = None,
412424
force_download: bool = False,
413425
cache_dir: str = HUGGINGFACE_HUB_CACHE,
@@ -417,13 +429,22 @@ def _from_transformers(
417429
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
418430
**kwargs,
419431
):
432+
if use_auth_token is not None:
433+
warnings.warn(
434+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
435+
FutureWarning,
436+
)
437+
if token is not None:
438+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
439+
token = use_auth_token
440+
420441
if is_torch_version("<", "2.1.0"):
421442
raise ImportError("`torch>=2.0.0` is needed to trace your model")
422443

423444
task = cls.export_feature
424445
model_kwargs = {
425446
"revision": revision,
426-
"use_auth_token": use_auth_token,
447+
"token": token,
427448
"cache_dir": cache_dir,
428449
"subfolder": subfolder,
429450
"local_files_only": local_files_only,
@@ -445,7 +466,7 @@ def _from_transformers(
445466
model_id=save_dir_path,
446467
config=config,
447468
use_cache=use_cache,
448-
use_auth_token=use_auth_token,
469+
token=token,
449470
revision=revision,
450471
force_download=force_download,
451472
cache_dir=cache_dir,

optimum/intel/ipex/modeling_base.py

+29-4
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515

1616
import logging
1717
import os
18+
import warnings
1819
from pathlib import Path
1920
from tempfile import TemporaryDirectory
2021
from typing import Optional, Tuple, Union
@@ -152,6 +153,7 @@ def _from_transformers(
152153
config: PretrainedConfig,
153154
use_cache: bool = True,
154155
use_auth_token: Optional[Union[bool, str]] = None,
156+
token: Optional[Union[bool, str]] = None,
155157
revision: Optional[str] = None,
156158
force_download: bool = False,
157159
cache_dir: str = HUGGINGFACE_HUB_CACHE,
@@ -160,13 +162,24 @@ def _from_transformers(
160162
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
161163
trust_remote_code: bool = False,
162164
):
165+
if use_auth_token is not None:
166+
warnings.warn(
167+
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
168+
FutureWarning,
169+
)
170+
if token is not None:
171+
raise ValueError(
172+
"Both the arguments `use_auth_token` and `token` were specified, which is not supported. Please specify only `token`."
173+
)
174+
token = use_auth_token
175+
163176
if is_torch_version("<", "2.1.0"):
164177
raise ImportError("`torch>=2.0.0` is needed to trace your model")
165178

166179
task = cls.export_feature
167180
model_kwargs = {
168181
"revision": revision,
169-
"use_auth_token": use_auth_token,
182+
"token": token,
170183
"cache_dir": cache_dir,
171184
"subfolder": subfolder,
172185
"local_files_only": local_files_only,
@@ -188,15 +201,27 @@ def _from_pretrained(
188201
cls,
189202
model_id: Union[str, Path],
190203
config: PretrainedConfig,
191-
use_auth_token: Optional[Union[bool, str, None]] = None,
192-
revision: Optional[Union[str, None]] = None,
204+
use_auth_token: Optional[Union[bool, str]] = None,
205+
token: Optional[Union[bool, str]] = None,
206+
revision: Optional[str] = None,
193207
force_download: bool = False,
194208
cache_dir: str = HUGGINGFACE_HUB_CACHE,
195209
file_name: Optional[str] = WEIGHTS_NAME,
196210
local_files_only: bool = False,
197211
subfolder: str = "",
198212
**kwargs,
199213
):
214+
if use_auth_token is not None:
215+
warnings.warn(
216+
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
217+
FutureWarning,
218+
)
219+
if token is not None:
220+
raise ValueError(
221+
"Both the arguments `use_auth_token` and `token` were specified, which is not supported. Please specify only `token`."
222+
)
223+
token = use_auth_token
224+
200225
if not getattr(config, "torchscript", False):
201226
raise ValueError(
202227
"`config.torchscript` should be set to `True`, if your model is not a TorchScript model and needs to be traced please set `export=True` when loading it with `.from_pretrained()`"
@@ -211,7 +236,7 @@ def _from_pretrained(
211236
model_cache_path = hf_hub_download(
212237
repo_id=model_id,
213238
filename=file_name,
214-
use_auth_token=use_auth_token,
239+
token=token,
215240
revision=revision,
216241
cache_dir=cache_dir,
217242
force_download=force_download,

optimum/intel/neural_compressor/modeling_base.py

+15-4
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
import logging
1616
import os
17+
import warnings
1718
from pathlib import Path
1819
from tempfile import TemporaryDirectory
1920
from typing import Dict, Optional, Union
@@ -98,8 +99,9 @@ def _from_pretrained(
9899
cls,
99100
model_id: Union[str, Path],
100101
config: PretrainedConfig,
101-
use_auth_token: Optional[Union[bool, str, None]] = None,
102-
revision: Optional[Union[str, None]] = None,
102+
use_auth_token: Optional[Union[bool, str]] = None,
103+
token: Optional[Union[bool, str]] = None,
104+
revision: Optional[str] = None,
103105
force_download: bool = False,
104106
cache_dir: str = HUGGINGFACE_HUB_CACHE,
105107
file_name: str = WEIGHTS_NAME,
@@ -108,6 +110,15 @@ def _from_pretrained(
108110
trust_remote_code: bool = False,
109111
**kwargs,
110112
):
113+
if use_auth_token is not None:
114+
warnings.warn(
115+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
116+
FutureWarning,
117+
)
118+
if token is not None:
119+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
120+
token = use_auth_token
121+
111122
model_name_or_path = kwargs.pop("model_name_or_path", None)
112123
if model_name_or_path is not None:
113124
logger.warning("`model_name_or_path` is deprecated please use `model_id`")
@@ -122,7 +133,7 @@ def _from_pretrained(
122133
repo_id=model_id,
123134
filename=file_name,
124135
subfolder=subfolder,
125-
use_auth_token=use_auth_token,
136+
token=token,
126137
revision=revision,
127138
cache_dir=cache_dir,
128139
force_download=force_download,
@@ -145,7 +156,7 @@ def _from_pretrained(
145156

146157
return _BaseQBitsAutoModelClass.from_pretrained(
147158
pretrained_model_name_or_path=model_id,
148-
use_auth_token=use_auth_token,
159+
token=token,
149160
revision=revision,
150161
force_download=force_download,
151162
cache_dir=cache_dir,

optimum/intel/neural_compressor/quantization.py

+18-4
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
import inspect
1717
import logging
1818
import types
19+
import warnings
1920
from enum import Enum
2021
from itertools import chain
2122
from pathlib import Path
@@ -446,7 +447,8 @@ def get_calibration_dataset(
446447
dataset_split: str = "train",
447448
preprocess_function: Optional[Callable] = None,
448449
preprocess_batch: bool = True,
449-
use_auth_token: bool = False,
450+
use_auth_token: Optional[Union[bool, str]] = None,
451+
token: Optional[Union[bool, str]] = None,
450452
) -> Dataset:
451453
"""
452454
Create the calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
@@ -465,16 +467,28 @@ def get_calibration_dataset(
465467
Processing function to apply to each example after loading dataset.
466468
preprocess_batch (`bool`, defaults to `True`):
467469
Whether the `preprocess_function` should be batched.
468-
use_auth_token (`bool`, defaults to `False`):
469-
Whether to use the token generated when running `transformers-cli login`.
470+
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
471+
Deprecated. Please use `token` instead.
472+
token (Optional[Union[bool, str]], defaults to `None`):
473+
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
474+
when running `huggingface-cli login` (stored in `~/.huggingface`).
470475
Returns:
471476
The calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
472477
"""
478+
if use_auth_token is not None:
479+
warnings.warn(
480+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
481+
FutureWarning,
482+
)
483+
if token is not None:
484+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
485+
token = use_auth_token
486+
473487
calibration_dataset = load_dataset(
474488
dataset_name,
475489
name=dataset_config_name,
476490
split=dataset_split,
477-
use_auth_token=use_auth_token,
491+
token=token,
478492
)
479493

480494
if num_samples is not None:

optimum/intel/openvino/loaders.py

+18-5
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
# limitations under the License.
1414

1515
import logging
16+
import warnings
1617
from typing import Dict, List, Optional, Union
1718

1819
import torch
@@ -188,9 +189,11 @@ def load_textual_inversion(
188189
local_files_only (`bool`, *optional*, defaults to `False`):
189190
Whether to only load local model weights and configuration files or not. If set to `True`, the model
190191
won't be downloaded from the Hub.
191-
use_auth_token (`str` or *bool*, *optional*):
192-
The token to use as HTTP bearer authorization for remote files. If `True`, the token generated from
193-
`diffusers-cli login` (stored in `~/.huggingface`) is used.
192+
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
193+
Deprecated. Please use `token` instead.
194+
token (Optional[Union[bool, str]], defaults to `None`):
195+
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
196+
when running `huggingface-cli login` (stored in `~/.huggingface`).
194197
revision (`str`, *optional*, defaults to `"main"`):
195198
The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
196199
allowed by Git.
@@ -258,11 +261,21 @@ def load_textual_inversion(
258261
proxies = kwargs.pop("proxies", None)
259262
local_files_only = kwargs.pop("local_files_only", HF_HUB_OFFLINE)
260263
use_auth_token = kwargs.pop("use_auth_token", None)
264+
token = kwargs.pop("token", None)
261265
revision = kwargs.pop("revision", None)
262266
subfolder = kwargs.pop("subfolder", None)
263267
weight_name = kwargs.pop("weight_name", None)
264268
use_safetensors = kwargs.pop("use_safetensors", None)
265269

270+
if use_auth_token is not None:
271+
warnings.warn(
272+
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
273+
FutureWarning,
274+
)
275+
if token is not None:
276+
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
277+
token = use_auth_token
278+
266279
if use_safetensors and not is_safetensors_available():
267280
raise ValueError(
268281
"`use_safetensors`=True but safetensors is not installed. Please install safetensors with `pip install safetensors"
@@ -319,7 +332,7 @@ def load_textual_inversion(
319332
resume_download=resume_download,
320333
proxies=proxies,
321334
local_files_only=local_files_only,
322-
use_auth_token=use_auth_token,
335+
use_auth_token=token, # still uses use_auth_token
323336
revision=revision,
324337
subfolder=subfolder,
325338
user_agent=user_agent,
@@ -340,7 +353,7 @@ def load_textual_inversion(
340353
resume_download=resume_download,
341354
proxies=proxies,
342355
local_files_only=local_files_only,
343-
use_auth_token=use_auth_token,
356+
use_auth_token=token, # still uses use_auth_token
344357
revision=revision,
345358
subfolder=subfolder,
346359
user_agent=user_agent,

0 commit comments

Comments
 (0)