Skip to content

Commit 3b94fdb

Browse files
committed
use built-in generics
1 parent f4fd100 commit 3b94fdb

File tree

109 files changed

+1166
-1183
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

109 files changed

+1166
-1183
lines changed

src/huggingface_hub/_commit_api.py

Lines changed: 24 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
from dataclasses import dataclass, field
1212
from itertools import groupby
1313
from pathlib import Path, PurePosixPath
14-
from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union
14+
from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, Literal, Optional, Union
1515

1616
from tqdm.contrib.concurrent import thread_map
1717

@@ -306,7 +306,7 @@ def _validate_path_in_repo(path_in_repo: str) -> str:
306306
CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete]
307307

308308

309-
def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
309+
def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
310310
"""
311311
Warn user when a list of operations is expected to overwrite itself in a single
312312
commit.
@@ -321,7 +321,7 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
321321
delete before upload) but can happen if a user deletes an entire folder and then
322322
add new files to it.
323323
"""
324-
nb_additions_per_path: Dict[str, int] = defaultdict(int)
324+
nb_additions_per_path: dict[str, int] = defaultdict(int)
325325
for operation in operations:
326326
path_in_repo = operation.path_in_repo
327327
if isinstance(operation, CommitOperationAdd):
@@ -355,10 +355,10 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
355355
@validate_hf_hub_args
356356
def _upload_lfs_files(
357357
*,
358-
additions: List[CommitOperationAdd],
358+
additions: list[CommitOperationAdd],
359359
repo_type: str,
360360
repo_id: str,
361-
headers: Dict[str, str],
361+
headers: dict[str, str],
362362
endpoint: Optional[str] = None,
363363
num_threads: int = 5,
364364
revision: Optional[str] = None,
@@ -377,7 +377,7 @@ def _upload_lfs_files(
377377
repo_id (`str`):
378378
A namespace (user or an organization) and a repo name separated
379379
by a `/`.
380-
headers (`Dict[str, str]`):
380+
headers (`dict[str, str]`):
381381
Headers to use for the request, including authorization headers and user agent.
382382
num_threads (`int`, *optional*):
383383
The number of concurrent threads to use when uploading. Defaults to 5.
@@ -395,7 +395,7 @@ def _upload_lfs_files(
395395
# Step 1: retrieve upload instructions from the LFS batch endpoint.
396396
# Upload instructions are retrieved by chunk of 256 files to avoid reaching
397397
# the payload limit.
398-
batch_actions: List[Dict] = []
398+
batch_actions: list[Dict] = []
399399
for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
400400
batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
401401
upload_infos=[op.upload_info for op in chunk],
@@ -466,10 +466,10 @@ def _wrapped_lfs_upload(batch_action) -> None:
466466
@validate_hf_hub_args
467467
def _upload_xet_files(
468468
*,
469-
additions: List[CommitOperationAdd],
469+
additions: list[CommitOperationAdd],
470470
repo_type: str,
471471
repo_id: str,
472-
headers: Dict[str, str],
472+
headers: dict[str, str],
473473
endpoint: Optional[str] = None,
474474
revision: Optional[str] = None,
475475
create_pr: Optional[bool] = None,
@@ -486,7 +486,7 @@ def _upload_xet_files(
486486
repo_id (`str`):
487487
A namespace (user or an organization) and a repo name separated
488488
by a `/`.
489-
headers (`Dict[str, str]`):
489+
headers (`dict[str, str]`):
490490
Headers to use for the request, including authorization headers and user agent.
491491
endpoint: (`str`, *optional*):
492492
The endpoint to use for the xetcas service. Defaults to `constants.ENDPOINT`.
@@ -555,7 +555,7 @@ def _upload_xet_files(
555555
xet_endpoint = xet_connection_info.endpoint
556556
access_token_info = (xet_connection_info.access_token, xet_connection_info.expiration_unix_epoch)
557557

558-
def token_refresher() -> Tuple[str, int]:
558+
def token_refresher() -> tuple[str, int]:
559559
new_xet_connection = fetch_xet_connection_info_from_repo_info(
560560
token_type=XetTokenType.WRITE,
561561
repo_id=repo_id,
@@ -628,7 +628,7 @@ def _fetch_upload_modes(
628628
additions: Iterable[CommitOperationAdd],
629629
repo_type: str,
630630
repo_id: str,
631-
headers: Dict[str, str],
631+
headers: dict[str, str],
632632
revision: str,
633633
endpoint: Optional[str] = None,
634634
create_pr: bool = False,
@@ -647,7 +647,7 @@ def _fetch_upload_modes(
647647
repo_id (`str`):
648648
A namespace (user or an organization) and a repo name separated
649649
by a `/`.
650-
headers (`Dict[str, str]`):
650+
headers (`dict[str, str]`):
651651
Headers to use for the request, including authorization headers and user agent.
652652
revision (`str`):
653653
The git revision to upload the files to. Can be any valid git revision.
@@ -665,9 +665,9 @@ def _fetch_upload_modes(
665665
endpoint = endpoint if endpoint is not None else constants.ENDPOINT
666666

667667
# Fetch upload mode (LFS or regular) chunk by chunk.
668-
upload_modes: Dict[str, UploadMode] = {}
669-
should_ignore_info: Dict[str, bool] = {}
670-
oid_info: Dict[str, Optional[str]] = {}
668+
upload_modes: dict[str, UploadMode] = {}
669+
should_ignore_info: dict[str, bool] = {}
670+
oid_info: dict[str, Optional[str]] = {}
671671

672672
for chunk in chunk_iterable(additions, 256):
673673
payload: Dict = {
@@ -713,10 +713,10 @@ def _fetch_files_to_copy(
713713
copies: Iterable[CommitOperationCopy],
714714
repo_type: str,
715715
repo_id: str,
716-
headers: Dict[str, str],
716+
headers: dict[str, str],
717717
revision: str,
718718
endpoint: Optional[str] = None,
719-
) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]:
719+
) -> dict[tuple[str, Optional[str]], Union["RepoFile", bytes]]:
720720
"""
721721
Fetch information about the files to copy.
722722
@@ -732,12 +732,12 @@ def _fetch_files_to_copy(
732732
repo_id (`str`):
733733
A namespace (user or an organization) and a repo name separated
734734
by a `/`.
735-
headers (`Dict[str, str]`):
735+
headers (`dict[str, str]`):
736736
Headers to use for the request, including authorization headers and user agent.
737737
revision (`str`):
738738
The git revision to upload the files to. Can be any valid git revision.
739739
740-
Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
740+
Returns: `dict[tuple[str, Optional[str]], Union[RepoFile, bytes]]`
741741
Key is the file path and revision of the file to copy.
742742
Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files).
743743
@@ -750,9 +750,9 @@ def _fetch_files_to_copy(
750750
from .hf_api import HfApi, RepoFolder
751751

752752
hf_api = HfApi(endpoint=endpoint, headers=headers)
753-
files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
753+
files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
754754
# Store (path, revision) -> oid mapping
755-
oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {}
755+
oid_info: dict[tuple[str, Optional[str]], Optional[str]] = {}
756756
# 1. Fetch OIDs for destination paths in batches.
757757
dest_paths = [op.path_in_repo for op in copies]
758758
for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE):
@@ -812,11 +812,11 @@ def _fetch_files_to_copy(
812812

813813
def _prepare_commit_payload(
814814
operations: Iterable[CommitOperation],
815-
files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]],
815+
files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]],
816816
commit_message: str,
817817
commit_description: Optional[str] = None,
818818
parent_commit: Optional[str] = None,
819-
) -> Iterable[Dict[str, Any]]:
819+
) -> Iterable[dict[str, Any]]:
820820
"""
821821
Builds the payload to POST to the `/commit` API of the Hub.
822822

src/huggingface_hub/_commit_scheduler.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
from io import SEEK_END, SEEK_SET, BytesIO
88
from pathlib import Path
99
from threading import Lock, Thread
10-
from typing import Dict, List, Optional, Union
10+
from typing import Optional, Union
1111

1212
from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi
1313
from .utils import filter_repo_objects
@@ -53,9 +53,9 @@ class CommitScheduler:
5353
Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
5454
token (`str`, *optional*):
5555
The token to use to commit to the repo. Defaults to the token saved on the machine.
56-
allow_patterns (`List[str]` or `str`, *optional*):
56+
allow_patterns (`list[str]` or `str`, *optional*):
5757
If provided, only files matching at least one pattern are uploaded.
58-
ignore_patterns (`List[str]` or `str`, *optional*):
58+
ignore_patterns (`list[str]` or `str`, *optional*):
5959
If provided, files matching any of the patterns are not uploaded.
6060
squash_history (`bool`, *optional*):
6161
Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
@@ -108,8 +108,8 @@ def __init__(
108108
revision: Optional[str] = None,
109109
private: Optional[bool] = None,
110110
token: Optional[str] = None,
111-
allow_patterns: Optional[Union[List[str], str]] = None,
112-
ignore_patterns: Optional[Union[List[str], str]] = None,
111+
allow_patterns: Optional[Union[list[str], str]] = None,
112+
ignore_patterns: Optional[Union[list[str], str]] = None,
113113
squash_history: bool = False,
114114
hf_api: Optional["HfApi"] = None,
115115
) -> None:
@@ -138,7 +138,7 @@ def __init__(
138138
self.token = token
139139

140140
# Keep track of already uploaded files
141-
self.last_uploaded: Dict[Path, float] = {} # key is local path, value is timestamp
141+
self.last_uploaded: dict[Path, float] = {} # key is local path, value is timestamp
142142

143143
# Scheduler
144144
if not every > 0:
@@ -232,7 +232,7 @@ def push_to_hub(self) -> Optional[CommitInfo]:
232232
prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else ""
233233

234234
# Filter with pattern + filter out unchanged files + retrieve current file size
235-
files_to_upload: List[_FileToUpload] = []
235+
files_to_upload: list[_FileToUpload] = []
236236
for relpath in filter_repo_objects(
237237
relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns
238238
):

src/huggingface_hub/_inference_endpoints.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -261,7 +261,7 @@ def update(
261261
revision: Optional[str] = None,
262262
task: Optional[str] = None,
263263
custom_image: Optional[Dict] = None,
264-
secrets: Optional[Dict[str, str]] = None,
264+
secrets: Optional[dict[str, str]] = None,
265265
) -> "InferenceEndpoint":
266266
"""Update the Inference Endpoint.
267267
@@ -296,7 +296,7 @@ def update(
296296
custom_image (`Dict`, *optional*):
297297
A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
298298
Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
299-
secrets (`Dict[str, str]`, *optional*):
299+
secrets (`dict[str, str]`, *optional*):
300300
Secret values to inject in the container environment.
301301
Returns:
302302
[`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.

src/huggingface_hub/_jobs_api.py

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from dataclasses import dataclass
1616
from datetime import datetime
1717
from enum import Enum
18-
from typing import Any, Dict, List, Optional, Union
18+
from typing import Any, Optional, Union
1919

2020
from huggingface_hub import constants
2121
from huggingface_hub._space_api import SpaceHardware
@@ -71,13 +71,13 @@ class JobInfo:
7171
space_id (`str` or `None`):
7272
The Docker image from Hugging Face Spaces used for the Job.
7373
Can be None if docker_image is present instead.
74-
command (`List[str]` or `None`):
74+
command (`list[str]` or `None`):
7575
Command of the Job, e.g. `["python", "-c", "print('hello world')"]`
76-
arguments (`List[str]` or `None`):
76+
arguments (`list[str]` or `None`):
7777
Arguments passed to the command
78-
environment (`Dict[str]` or `None`):
78+
environment (`dict[str, Any]` or `None`):
7979
Environment variables of the Job as a dictionary.
80-
secrets (`Dict[str]` or `None`):
80+
secrets (`dict[str, Any]` or `None`):
8181
Secret environment variables of the Job (encrypted).
8282
flavor (`str` or `None`):
8383
Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
@@ -111,10 +111,10 @@ class JobInfo:
111111
created_at: Optional[datetime]
112112
docker_image: Optional[str]
113113
space_id: Optional[str]
114-
command: Optional[List[str]]
115-
arguments: Optional[List[str]]
116-
environment: Optional[Dict[str, Any]]
117-
secrets: Optional[Dict[str, Any]]
114+
command: Optional[list[str]]
115+
arguments: Optional[list[str]]
116+
environment: Optional[dict[str, Any]]
117+
secrets: Optional[dict[str, Any]]
118118
flavor: Optional[SpaceHardware]
119119
status: JobStatus
120120
owner: JobOwner
@@ -148,13 +148,13 @@ def __init__(self, **kwargs) -> None:
148148
class JobSpec:
149149
docker_image: Optional[str]
150150
space_id: Optional[str]
151-
command: Optional[List[str]]
152-
arguments: Optional[List[str]]
153-
environment: Optional[Dict[str, Any]]
154-
secrets: Optional[Dict[str, Any]]
151+
command: Optional[list[str]]
152+
arguments: Optional[list[str]]
153+
environment: Optional[dict[str, Any]]
154+
secrets: Optional[dict[str, Any]]
155155
flavor: Optional[SpaceHardware]
156156
timeout: Optional[int]
157-
tags: Optional[List[str]]
157+
tags: Optional[list[str]]
158158
arch: Optional[str]
159159

160160
def __init__(self, **kwargs) -> None:
@@ -202,7 +202,7 @@ class ScheduledJobInfo:
202202
Scheduled Job ID.
203203
created_at (`datetime` or `None`):
204204
When the scheduled Job was created.
205-
tags (`List[str]` or `None`):
205+
tags (`list[str]` or `None`):
206206
The tags of the scheduled Job.
207207
schedule (`str` or `None`):
208208
One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
@@ -263,14 +263,14 @@ def __init__(self, **kwargs) -> None:
263263
def _create_job_spec(
264264
*,
265265
image: str,
266-
command: List[str],
267-
env: Optional[Dict[str, Any]],
268-
secrets: Optional[Dict[str, Any]],
266+
command: list[str],
267+
env: Optional[dict[str, Any]],
268+
secrets: Optional[dict[str, Any]],
269269
flavor: Optional[SpaceHardware],
270270
timeout: Optional[Union[int, float, str]],
271-
) -> Dict[str, Any]:
271+
) -> dict[str, Any]:
272272
# prepare job spec to send to HF Jobs API
273-
job_spec: Dict[str, Any] = {
273+
job_spec: dict[str, Any] = {
274274
"command": command,
275275
"arguments": [],
276276
"environment": env or {},

src/huggingface_hub/_oauth.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import urllib.parse
77
import warnings
88
from dataclasses import dataclass
9-
from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Union
9+
from typing import TYPE_CHECKING, Dict, Literal, Optional, Union
1010

1111
from . import constants
1212
from .hf_api import whoami
@@ -39,7 +39,7 @@ class OAuthOrgInfo:
3939
Whether the org has a payment method set up. Hugging Face field.
4040
role_in_org (`Optional[str]`, *optional*):
4141
The user's role in the org. Hugging Face field.
42-
security_restrictions (`Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]]`, *optional*):
42+
security_restrictions (`Optional[list[Literal["ip", "token-policy", "mfa", "sso"]]]`, *optional*):
4343
Array of security restrictions that the user hasn't completed for this org. Possible values: "ip", "token-policy", "mfa", "sso". Hugging Face field.
4444
"""
4545

@@ -50,7 +50,7 @@ class OAuthOrgInfo:
5050
is_enterprise: bool
5151
can_pay: Optional[bool] = None
5252
role_in_org: Optional[str] = None
53-
security_restrictions: Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]] = None
53+
security_restrictions: Optional[list[Literal["ip", "token-policy", "mfa", "sso"]]] = None
5454

5555

5656
@dataclass
@@ -79,7 +79,7 @@ class OAuthUserInfo:
7979
Whether the user is a pro user. Hugging Face field.
8080
can_pay (`Optional[bool]`, *optional*):
8181
Whether the user has a payment method set up. Hugging Face field.
82-
orgs (`Optional[List[OrgInfo]]`, *optional*):
82+
orgs (`Optional[list[OrgInfo]]`, *optional*):
8383
List of organizations the user is part of. Hugging Face field.
8484
"""
8585

@@ -93,7 +93,7 @@ class OAuthUserInfo:
9393
website: Optional[str]
9494
is_pro: bool
9595
can_pay: Optional[bool]
96-
orgs: Optional[List[OAuthOrgInfo]]
96+
orgs: Optional[list[OAuthOrgInfo]]
9797

9898

9999
@dataclass
@@ -306,7 +306,7 @@ async def oauth_redirect_callback(request: fastapi.Request) -> RedirectResponse:
306306
target_url = request.query_params.get("_target_url")
307307

308308
# Build redirect URI with the same query params as before and bump nb_redirects count
309-
query_params: Dict[str, Union[int, str]] = {"_nb_redirects": nb_redirects + 1}
309+
query_params: dict[str, Union[int, str]] = {"_nb_redirects": nb_redirects + 1}
310310
if target_url:
311311
query_params["_target_url"] = target_url
312312

@@ -449,7 +449,7 @@ def _get_mocked_oauth_info() -> Dict:
449449
}
450450

451451

452-
def _get_oauth_uris(route_prefix: str = "/") -> Tuple[str, str, str]:
452+
def _get_oauth_uris(route_prefix: str = "/") -> tuple[str, str, str]:
453453
route_prefix = route_prefix.strip("/")
454454
if route_prefix:
455455
route_prefix = f"/{route_prefix}"

0 commit comments

Comments
 (0)