Improve whoami() error messages by specifying token source #2814

Merged (20 commits, Feb 5, 2025)
Changes from 4 commits
117 changes: 102 additions & 15 deletions src/huggingface_hub/hf_api.py
@@ -1615,46 +1615,133 @@ def run_as_future(self, fn: Callable[..., R], *args, **kwargs) -> Future[R]:
self._thread_pool
return self._thread_pool.submit(fn, *args, **kwargs)


from huggingface_hub.utils._auth import _get_token_from_google_colab, _get_token_from_environment, _get_token_from_file
Contributor:
Can you move this line to the imports section? And run `make style` + `make quality` locally to make sure the code styling aligns with the standards of this repo.

Contributor:
Must be moved to the import section (around `from .utils.endpoint_helpers import _is_emission_within_threshold`). And run `make style` afterwards to make sure imports are correctly sorted.

Contributor Author:

Just did the changes.
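For reference, a minimal sketch of what the relocated import could look like once it sits next to the existing helper import mentioned above; the relative-import form and the exact neighbouring lines are assumptions, not taken from this diff:

from .utils._auth import (
    _get_token_from_environment,
    _get_token_from_file,
    _get_token_from_google_colab,
)
from .utils.endpoint_helpers import _is_emission_within_threshold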


def _get_token_source(self, token: Optional[Union[bool, str]] = None) -> str:
"""
Determine the source of the token being used.

Args:
token: The token parameter passed to whoami

Returns:
str: Source of the token ("parameter", "environment", "file", "colab", or "unknown")
"""
if isinstance(token, str):
return "parameter"

# Use the helper functions to check token sources
if _get_token_from_environment():
return "environment"

if _get_token_from_file():
return "file"

if _get_token_from_google_colab():
return "colab"

return "unknown"

def _get_token_error_message(self, source: str) -> str:
"""
Generate a detailed error message based on the token source.

Args:
source: The source of the token ("parameter", "environment", "file", "colab", or "unknown")

Returns:
str: Customized error message
"""
base_message = "Invalid user token. "

if source == "parameter":
return base_message + (
"The token you provided as parameter is invalid. Please verify it's correct "
"or remove it to use the default authentication method."
)
elif source == "environment":
return base_message + (
"The token from HF_TOKEN environment variable is invalid. "
"Note that HF_TOKEN takes precedence over the token file. "
"Either update HF_TOKEN or unset it to use the token from `huggingface-cli login`."
)
elif source == "file":
return base_message + (
"The token stored in ~/.cache/huggingface/token is invalid. "
"Please run `huggingface-cli login` to update it."
)
elif source == "colab":
return base_message + (
"The token from Google Colab vault is invalid. "
"Please run `huggingface-cli login` in your Colab notebook to update it."
)
else:
return base_message + (
"Please make sure you are properly logged in by executing `huggingface-cli login`, "
"or provide a valid token."
)
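As a side note, this revision does not yet show the two helpers being wired together; the sketch below is only a hypothetical illustration of how they are meant to pair up, calling the private methods introduced above directly for demonstration:

from huggingface_hub import HfApi

api = HfApi()
source = api._get_token_source()  # possible values: "parameter", "environment", "file", "colab", "unknown"
message = api._get_token_error_message(source)  # source-specific "Invalid user token. ..." text
print(source)
print(message)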


@validate_hf_hub_args
def whoami(self, token: Union[bool, str, None] = None) -> Dict:
"""
Call HF API to know "whoami".

Args:
token (Union[bool, str, None], optional):
A valid user access token (string). Defaults to the locally saved
token, which is the recommended method for authentication (see
https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
To disable authentication, pass `False`.
"""
# Get the effective token using the helper function get_token
effective_token = get_token(token or self.token or True)

r = get_session().get(
f"{self.endpoint}/api/whoami-v2",
- headers=self._build_hf_headers(
-     # If `token` is provided and not `None`, it will be used by default.
-     # Otherwise, the token must be retrieved from cache or env variable.
-     token=(token or self.token or True),
- ),
+ headers=self._build_hf_headers(token=effective_token),
)

try:
hf_raise_for_status(r)
except HTTPError as e:
- raise HTTPError(
-     "Invalid user token. If you didn't pass a user token, make sure you "
-     "are properly logged in by executing `huggingface-cli login`, and "
-     "if you did pass a user token, double-check it's correct.",
-     request=e.request,
-     response=e.response,
- ) from e
- return r.json()
+ error_message = "Invalid user token."

# Check which token is the effective one and generate the error message accordingly
if effective_token == _get_token_from_google_colab():
error_message += (
" The token from Google Colab vault is invalid. "
"Please run `huggingface-cli login` in your Colab notebook to update it."
Contributor:
Suggested change
"Please run `huggingface-cli login` in your Colab notebook to update it."
"Please update it from the UI."

)
elif effective_token == _get_token_from_environment():
error_message += (
" The token from HF_TOKEN environment variable is invalid. "
"Note that HF_TOKEN takes precedence over the token file. "
"Either update HF_TOKEN or unset it to use the token from `huggingface-cli login`."
)
elif effective_token == _get_token_from_file():
error_message += (
" The token stored in ~/.cache/huggingface/token is invalid. "
"Please run `huggingface-cli login` to update it."
)
else:
error_message += (
" Please make sure you are properly logged in by executing `huggingface-cli login`, "
"or provide a valid token."
)

raise HTTPError(error_message, request=e.request, response=e.response) from e

return r.json()
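To illustrate the effect of the change, a small usage sketch (assuming the configured token has been revoked or mistyped): the HTTPError raised by whoami() now names where the bad token came from instead of the previous generic message.

from requests import HTTPError

from huggingface_hub import HfApi

api = HfApi()
try:
    # Uses whatever token is currently configured (HF_TOKEN, the token file, or the Colab vault).
    api.whoami()
except HTTPError as err:
    # With this change the message is source-specific, e.g.
    # "Invalid user token. The token from HF_TOKEN environment variable is invalid. ..."
    print(err)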
@_deprecate_method(
version="1.0",
message=(
"Permissions are more complex than when `get_token_permission` was first introduced. "
"OAuth and fine-grain tokens allows for more detailed permissions. "
"If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key."
),
)
def get_token_permission(
self, token: Union[bool, str, None] = None
) -> Literal["read", "write", "fineGrained", None]:
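Following the deprecation note above, a minimal sketch of the suggested replacement for get_token_permission(): read the token details from the whoami() payload instead. The exact shape of the "auth" entry is an assumption based on the deprecation message, not something this diff guarantees.

from huggingface_hub import HfApi

# Assumed replacement flow: whoami() returns a dict whose "auth" key carries the token details.
auth_info = HfApi().whoami().get("auth", {})
print(auth_info)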