Commit 14b59ef

Apply ruff/flake8-type-checking rule TC006
TC006: Add quotes to type expression in `typing.cast()`
1 parent 8a04207 commit 14b59ef
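
For context: TC006 (from ruff's flake8-type-checking rule group) asks that the type expression passed as the first argument to `typing.cast()` be written as a string. `cast()` never inspects that argument at runtime, so quoting it avoids evaluating the type expression and lets names that are only needed for typing be imported under `if TYPE_CHECKING:`. A minimal sketch of the before/after pattern, using a hypothetical `parse_ints` helper rather than code from this commit:

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Imported for type checking only; the quoted cast below never evaluates it at runtime.
    from collections.abc import Iterable


def parse_ints(data: object) -> tuple[int, ...]:
    # Before TC006: cast(Iterable[int], data) builds the Iterable[int] object at runtime,
    # which would force the import above to happen unconditionally.
    # After TC006: the quoted type expression is left unevaluated by typing.cast().
    return tuple(cast("Iterable[int]", data))


print(parse_ints([1, 2, 3]))  # (1, 2, 3)

If reproducing this change locally, the autofix can usually be applied with something like `ruff check --select TC006 --fix`, though the exact invocation depends on the project's ruff configuration.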

17 files changed: +65 -64 lines changed

src/zarr/api/asynchronous.py

Lines changed: 1 addition & 1 deletion
@@ -321,7 +321,7 @@ async def open(
     try:
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we fix typing for array metadata dicts
-        _metadata_dict = cast(ArrayMetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayMetadataDict", metadata_dict)
         # for v2, the above would already have raised an exception if not an array
         zarr_format = _metadata_dict["zarr_format"]
         is_v3_array = zarr_format == 3 and _metadata_dict.get("node_type") == "array"

src/zarr/codecs/crc32c_.py

Lines changed: 4 additions & 2 deletions
@@ -40,7 +40,9 @@ async def _decode_single(
         inner_bytes = data[:-4]
 
         # Need to do a manual cast until https://github.com/numpy/numpy/issues/26783 is resolved
-        computed_checksum = np.uint32(crc32c(cast(typing_extensions.Buffer, inner_bytes))).tobytes()
+        computed_checksum = np.uint32(
+            crc32c(cast("typing_extensions.Buffer", inner_bytes))
+        ).tobytes()
         stored_checksum = bytes(crc32_bytes)
         if computed_checksum != stored_checksum:
             raise ValueError(
@@ -55,7 +57,7 @@ async def _encode_single(
     ) -> Buffer | None:
         data = chunk_bytes.as_numpy_array()
         # Calculate the checksum and "cast" it to a numpy array
-        checksum = np.array([crc32c(cast(typing_extensions.Buffer, data))], dtype=np.uint32)
+        checksum = np.array([crc32c(cast("typing_extensions.Buffer", data))], dtype=np.uint32)
         # Append the checksum (as bytes) to the data
         return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("B")))
 

src/zarr/codecs/sharding.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ class _ShardIndex(NamedTuple):
     def chunks_per_shard(self) -> ChunkCoords:
         result = tuple(self.offsets_and_lengths.shape[0:-1])
         # The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
-        return cast(ChunkCoords, result)
+        return cast("ChunkCoords", result)
 
     def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords:
         return tuple(

src/zarr/codecs/transpose.py

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]:
         raise TypeError(f"Expected an iterable. Got {data} instead.")
     if not all(isinstance(a, int) for a in data):
         raise TypeError(f"Expected an iterable of integers. Got {data} instead.")
-    return tuple(cast(Iterable[int], data))
+    return tuple(cast("Iterable[int]", data))
 
 
 @dataclass(frozen=True)

src/zarr/core/array.py

Lines changed: 13 additions & 13 deletions
@@ -268,7 +268,7 @@ def __init__(
         if isinstance(metadata, dict):
             zarr_format = metadata["zarr_format"]
             # TODO: remove this when we extensively type the dict representation of metadata
-            _metadata = cast(dict[str, JSON], metadata)
+            _metadata = cast("dict[str, JSON]", metadata)
             if zarr_format == 2:
                 metadata = ArrayV2Metadata.from_dict(_metadata)
             elif zarr_format == 3:
@@ -898,7 +898,7 @@ async def open(
         store_path = await make_store_path(store)
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we have better type hints
-        _metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
         return cls(store_path=store_path, metadata=_metadata_dict)
 
     @property
@@ -1394,7 +1394,7 @@ async def _set_selection(
             if isinstance(array_like, np._typing._SupportsArrayFunc):
                 # TODO: need to handle array types that don't support __array_function__
                 # like PyTorch and JAX
-                array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
+                array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
                 value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
         else:
             if not hasattr(value, "shape"):
@@ -1408,7 +1408,7 @@ async def _set_selection(
                     value = value.astype(dtype=self.metadata.dtype, order="A")
                 else:
                     value = np.array(value, dtype=self.metadata.dtype, order="A")
-        value = cast(NDArrayLike, value)
+        value = cast("NDArrayLike", value)
         # We accept any ndarray like object from the user and convert it
         # to a NDBuffer (or subclass). From this point onwards, we only pass
         # Buffer and NDBuffer between components.
@@ -2431,11 +2431,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLikeOrScalar:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
+            return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             return self.get_orthogonal_selection(pure_selection, fields=fields)
         else:
-            return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
+            return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)
 
     def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """Modify data for an item or region of the array.
@@ -2530,11 +2530,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
+            self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             self.set_orthogonal_selection(pure_selection, value, fields=fields)
         else:
-            self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
+            self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)
 
     @_deprecate_positional_args
     def get_basic_selection(
@@ -3652,7 +3652,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
         # TODO: remove this cast when type inference improves
         new_array = sync(self._async_array.update_attributes(new_attributes))
         # TODO: remove this cast when type inference improves
-        _new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
+        _new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
         return type(self)(_new_array)
 
     def __repr__(self) -> str:
@@ -4238,7 +4238,7 @@ async def init_array(
             serializer=serializer,
             dtype=dtype_parsed,
         )
-        sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
+        sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
         codecs_out: tuple[Codec, ...]
         if shard_shape_parsed is not None:
             index_location = None
@@ -4509,7 +4509,7 @@ def _parse_keep_array_attr(
             compressors = "auto"
     if serializer == "keep":
         if zarr_format == 3 and data.metadata.zarr_format == 3:
-            serializer = cast(SerializerLike, data.serializer)
+            serializer = cast("SerializerLike", data.serializer)
         else:
             serializer = "auto"
     if fill_value is None:
@@ -4687,7 +4687,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(filters, dict | Codec):
             maybe_array_array = (filters,)
         else:
-            maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
+            maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
         out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)
 
     if serializer == "auto":
@@ -4704,7 +4704,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(compressors, dict | Codec):
            maybe_bytes_bytes = (compressors,)
         else:
-            maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
+            maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)
 
         out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)
 

src/zarr/core/array_spec.py

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ def from_dict(cls, data: ArrayConfigParams) -> Self:
         """
         kwargs_out: ArrayConfigParams = {}
         for f in fields(ArrayConfig):
-            field_name = cast(Literal["order", "write_empty_chunks"], f.name)
+            field_name = cast("Literal['order', 'write_empty_chunks']", f.name)
             if field_name not in data:
                 kwargs_out[field_name] = zarr_config.get(f"array.{field_name}")
             else:

src/zarr/core/buffer/core.py

Lines changed: 6 additions & 6 deletions
@@ -159,7 +159,7 @@ def create_zero_length(cls) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -207,7 +207,7 @@ def from_buffer(cls, buffer: Buffer) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -227,7 +227,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     def as_array_like(self) -> ArrayLike:
@@ -358,7 +358,7 @@ def create(
                 "Cannot call abstract method on the abstract class 'NDBuffer'"
             )
         return cls(
-            cast(NDArrayLike, None)
+            cast("NDArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -395,7 +395,7 @@ def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self:
                 "Cannot call abstract method on the abstract class 'NDBuffer'"
             )
         return cls(
-            cast(NDArrayLike, None)
+            cast("NDArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     def as_ndarray_like(self) -> NDArrayLike:
@@ -427,7 +427,7 @@ def as_scalar(self) -> ScalarType:
         """Returns the buffer as a scalar value"""
         if self._data.size != 1:
             raise ValueError("Buffer does not contain a single scalar value")
-        return cast(ScalarType, self.as_numpy_array()[()])
+        return cast("ScalarType", self.as_numpy_array()[()])
 
     @property
     def dtype(self) -> np.dtype[Any]:

src/zarr/core/buffer/gpu.py

Lines changed: 2 additions & 2 deletions
@@ -103,7 +103,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
         return cls.from_array_like(cp.frombuffer(bytes_like, dtype="B"))
 
     def as_numpy_array(self) -> npt.NDArray[Any]:
-        return cast(npt.NDArray[Any], cp.asnumpy(self._data))
+        return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
 
     def __add__(self, other: core.Buffer) -> Self:
         other_array = other.as_array_like()
@@ -204,7 +204,7 @@ def as_numpy_array(self) -> npt.NDArray[Any]:
         -------
         NumPy array of this buffer (might be a data copy)
         """
-        return cast(npt.NDArray[Any], cp.asnumpy(self._data))
+        return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
 
     def __getitem__(self, key: Any) -> Self:
         return self.__class__(self._data.__getitem__(key))

src/zarr/core/chunk_key_encodings.py

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@
 def parse_separator(data: JSON) -> SeparatorLiteral:
     if data not in (".", "/"):
         raise ValueError(f"Expected an '.' or '/' separator. Got {data} instead.")
-    return cast(SeparatorLiteral, data)
+    return cast("SeparatorLiteral", data)
 
 
 class ChunkKeyEncodingParams(TypedDict):
@@ -48,7 +48,7 @@ def from_dict(cls, data: dict[str, JSON] | ChunkKeyEncodingLike) -> ChunkKeyEnco
             data = {"name": data["name"], "configuration": {"separator": data["separator"]}}
 
         # TODO: remove this cast when we are statically typing the JSON metadata completely.
-        data = cast(dict[str, JSON], data)
+        data = cast("dict[str, JSON]", data)
 
         # configuration is optional for chunk key encodings
         name_parsed, config_parsed = parse_named_configuration(data, require_configuration=False)

src/zarr/core/common.py

Lines changed: 2 additions & 2 deletions
@@ -157,7 +157,7 @@ def parse_fill_value(data: Any) -> Any:
 
 def parse_order(data: Any) -> Literal["C", "F"]:
     if data in ("C", "F"):
-        return cast(Literal["C", "F"], data)
+        return cast("Literal['C', 'F']", data)
     raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.")
 
 
@@ -201,4 +201,4 @@ def _warn_order_kwarg() -> None:
 
 def _default_zarr_format() -> ZarrFormat:
     """Return the default zarr_version"""
-    return cast(ZarrFormat, int(zarr_config.get("default_zarr_format", 3)))
+    return cast("ZarrFormat", int(zarr_config.get("default_zarr_format", 3)))
