I am trying to test the Hugging Face model prithivida/parrot_paraphraser_on_T5, but I get a "token not found" error.
from parrot import Parrot
import torch
import warnings
warnings.filterwarnings("ignore")
parrot = Parrot(model_tag = "prithivida/parrot_paraphraser_on_T5", use_gpu=False)
The error I am getting:
OSError Traceback (most recent call last)
Cell In [10], line 2
1 #Init models (make sure you init ONLY once if you integrate this to your code)
----> 2 parrot = Parrot(model_tag = "prithivida/parrot_paraphraser_on_T5", use_gpu=False)
File ~/.local/lib/python3.10/site-packages/parrot/parrot.py:10, in Parrot.__init__(self, model_tag, use_gpu)
8 from parrot.filters import Fluency
9 from parrot.filters import Diversity
---> 10 self.tokenizer = AutoTokenizer.from_pretrained(model_tag, use_auth_token=True)
11 self.model = AutoModelForSeq2SeqLM.from_pretrained(model_tag, use_auth_token=True)
12 self.adequacy_score = Adequacy()
File ~/.local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py:560, in AutoTokenizer.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
557 return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
559 # Next, let's try to use the tokenizer_config file to get the tokenizer class.
--> 560 tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
561 if "_commit_hash" in tokenizer_config:
562 kwargs["_commit_hash"] = tokenizer_config["_commit_hash"]
File ~/.local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py:412, in get_tokenizer_config(pretrained_model_name_or_path, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, **kwargs)
353 """
354 Loads the tokenizer configuration from a pretrained model tokenizer configuration.
355
(...)
409 tokenizer_config = get_tokenizer_config("tokenizer-test")
410 ```"""
411 commit_hash = kwargs.get("_commit_hash", None)
--> 412 resolved_config_file = cached_file(
413 pretrained_model_name_or_path,
414 TOKENIZER_CONFIG_FILE,
415 cache_dir=cache_dir,
416 force_download=force_download,
417 resume_download=resume_download,
418 proxies=proxies,
419 use_auth_token=use_auth_token,
420 revision=revision,
421 local_files_only=local_files_only,
422 _raise_exceptions_for_missing_entries=False,
423 _raise_exceptions_for_connection_errors=False,
424 _commit_hash=commit_hash,
425 )
426 if resolved_config_file is None:
427 logger.info("Could not locate the tokenizer configuration file, will try to use the model config instead.")
File ~/.local/lib/python3.10/site-packages/transformers/utils/hub.py:409, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, subfolder, user_agent, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash)
406 user_agent = http_user_agent(user_agent)
407 try:
408 # Load from URL or cache if already cached
--> 409 resolved_file = hf_hub_download(
410 path_or_repo_id,
411 filename,
412 subfolder=None if len(subfolder) == 0 else subfolder,
413 revision=revision,
414 cache_dir=cache_dir,
415 user_agent=user_agent,
416 force_download=force_download,
417 proxies=proxies,
418 resume_download=resume_download,
419 use_auth_token=use_auth_token,
420 local_files_only=local_files_only,
421 )
423 except RepositoryNotFoundError:
424 raise EnvironmentError(
425 f"{path_or_repo_id} is not a local folder and is not a valid model identifier "
426 "listed on 'https://huggingface.co/models'\nIf this is a private repository, make sure to "
427 "pass a token having permission to this repo with `use_auth_token` or log in with "
428 "`huggingface-cli login` and pass `use_auth_token=True`."
429 )
File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:124, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
119 if check_use_auth_token:
120 kwargs = smoothly_deprecate_use_auth_token(
121 fn_name=fn.__name__, has_token=has_token, kwargs=kwargs
122 )
--> 124 return fn(*args, **kwargs)
File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:1052, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, user_agent, force_download, force_filename, proxies, etag_timeout, resume_download, token, local_files_only, legacy_cache_layout)
1048 return pointer_path
1050 url = hf_hub_url(repo_id, filename, repo_type=repo_type, revision=revision)
-> 1052 headers = build_hf_headers(
1053 token=token,
1054 library_name=library_name,
1055 library_version=library_version,
1056 user_agent=user_agent,
1057 )
1059 url_to_download = url
1060 etag = None
File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:124, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
119 if check_use_auth_token:
120 kwargs = smoothly_deprecate_use_auth_token(
121 fn_name=fn.__name__, has_token=has_token, kwargs=kwargs
122 )
--> 124 return fn(*args, **kwargs)
File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py:117, in build_hf_headers(token, is_write_action, library_name, library_version, user_agent)
44 """
45 Build headers dictionary to send in a HF Hub call.
46
(...)
114 If `token=True` but token is not saved locally.
115 """
116 # Get auth token to send
--> 117 token_to_send = get_token_to_send(token)
118 _validate_token_to_send(token_to_send, is_write_action=is_write_action)
120 # Combine headers
File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py:149, in get_token_to_send(token)
147 if token is True:
148 if cached_token is None:
--> 149 raise EnvironmentError(
150 "Token is required (`token=True`), but no token found. You"
151 " need to provide a token or be logged in to Hugging Face with"
152 " `huggingface-cli login` or `huggingface_hub.login`. See"
153 " https://huggingface.co/settings/tokens."
154 )
155 return cached_token
157 # Case implicit use of the token is forbidden by env variable
OSError: Token is required (`token=True`), but no token found. You need to provide a token or be logged in to Hugging Face with `huggingface-cli login` or `huggingface_hub.login`. See https://huggingface.co/settings/tokens.
I have a secret access token, but I don't know where or how to pass it.
Stack trace after updating the token inside the Parrot class
in ~/.local/lib/python3.10/site-packages/parrot/parrot.py:
Traceback (most recent call last):
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/pp.py", line 8, in <module>
parrot = Parrot(model_tag = "prithivida/parrot_paraphraser_on_T5", use_gpu=False)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/parrot/parrot.py", line 10, in __init__
self.tokenizer = AutoTokenizer.from_pretrained(model_tag, use_auth_token=True)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 560, in from_pretrained
tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 412, in get_tokenizer_config
resolved_config_file = cached_file(
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/transformers/utils/hub.py", line 409, in cached_file
resolved_file = hf_hub_download(
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 124, in _inner_fn
return fn(*args, **kwargs)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1052, in hf_hub_download
headers = build_hf_headers(
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 124, in _inner_fn
return fn(*args, **kwargs)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 117, in build_hf_headers
token_to_send = get_token_to_send(token)
File "/media/chinmay/New Volume/myWorks/GIT_Hub/project_parrot_nlp/vnv/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 149, in get_token_to_send
raise EnvironmentError(
OSError: Token is required (`token=True`), but no token found. You need to provide a token or be logged in to Hugging Face with `huggingface-cli login` or `huggingface_hub.login`. See https://huggingface.co/settings/tokens.
You will have to edit the Parrot source code yourself until a new Parrot release is published that lets you set your Hugging Face token.
Open the file at the path below in your editor and apply the changes shown below.
~/.local/lib/python3.10/site-packages/parrot/parrot.py
self.tokenizer = AutoTokenizer.from_pretrained(model_tag, use_auth_token="<your token>")
self.model = AutoModelForSeq2SeqLM.from_pretrained(model_tag, use_auth_token="<your token>")
You can get a token from the Hugging Face settings dashboard if you don't already have one.
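If you prefer not to hard-code the token inside site-packages, here is a minimal sketch of the same edit that reads the token from an environment variable instead. HF_TOKEN is an arbitrary variable name chosen for this example, not something Parrot or transformers defines:
import os

# sketch of the edited lines in parrot/parrot.py, inside Parrot.__init__
hf_token = os.environ["HF_TOKEN"]  # export HF_TOKEN=<your token> before running your script
self.tokenizer = AutoTokenizer.from_pretrained(model_tag, use_auth_token=hf_token)
self.model = AutoModelForSeq2SeqLM.from_pretrained(model_tag, use_auth_token=hf_token)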
@ChinmayNayak Can you share the new stack trace?
I have added the updated stack trace to the question description.
Use a token generated at https://huggingface.co/settings/tokens and pass it as shown below.
Install the huggingface_hub Python library:
pip install huggingface_hub
python -c "from huggingface_hub.hf_api import HfFolder; HfFolder.save_token('YOUR_TOKEN_HERE')"
If you are using a notebook:
from huggingface_hub import notebook_login
notebook_login()
and paste your generated token when prompted.
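A quick way to check that the token was actually saved (whoami is part of the public huggingface_hub API); once this succeeds, the Parrot initialization should no longer raise the token error:
# sketch: confirm that the locally saved token is picked up by huggingface_hub
from huggingface_hub import HfApi
print(HfApi().whoami())  # prints your account details instead of raising an auth error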
The above solution worked for me.
I made the changes above: I replaced use_auth_token=True with my token inside the Parrot class, but the problem still persists.