@@ -268,7 +268,7 @@ def __init__(
         if isinstance(metadata, dict):
             zarr_format = metadata["zarr_format"]
             # TODO: remove this when we extensively type the dict representation of metadata
-            _metadata = cast(dict[str, JSON], metadata)
+            _metadata = cast("dict[str, JSON]", metadata)
             if zarr_format == 2:
                 metadata = ArrayV2Metadata.from_dict(_metadata)
             elif zarr_format == 3:
@@ -898,7 +898,7 @@ async def open(
         store_path = await make_store_path(store)
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we have better type hints
-        _metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
         return cls(store_path=store_path, metadata=_metadata_dict)
 
     @property
@@ -1394,7 +1394,7 @@ async def _set_selection(
             if isinstance(array_like, np._typing._SupportsArrayFunc):
                 # TODO: need to handle array types that don't support __array_function__
                 # like PyTorch and JAX
-                array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
+                array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
                 value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
         else:
             if not hasattr(value, "shape"):
@@ -1408,7 +1408,7 @@ async def _set_selection(
                     value = value.astype(dtype=self.metadata.dtype, order="A")
                 else:
                     value = np.array(value, dtype=self.metadata.dtype, order="A")
-        value = cast(NDArrayLike, value)
+        value = cast("NDArrayLike", value)
         # We accept any ndarray like object from the user and convert it
         # to a NDBuffer (or subclass). From this point onwards, we only pass
         # Buffer and NDBuffer between components.
@@ -2431,11 +2431,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLikeOrScalar:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
+            return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             return self.get_orthogonal_selection(pure_selection, fields=fields)
         else:
-            return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
+            return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)
 
     def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """Modify data for an item or region of the array.
@@ -2530,11 +2530,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
+            self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             self.set_orthogonal_selection(pure_selection, value, fields=fields)
         else:
-            self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
+            self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)
 
     @_deprecate_positional_args
     def get_basic_selection(
@@ -3652,7 +3652,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
         # TODO: remove this cast when type inference improves
         new_array = sync(self._async_array.update_attributes(new_attributes))
         # TODO: remove this cast when type inference improves
-        _new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
+        _new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
         return type(self)(_new_array)
 
     def __repr__(self) -> str:
@@ -4238,7 +4238,7 @@ async def init_array(
             serializer=serializer,
             dtype=dtype_parsed,
         )
-        sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
+        sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
         codecs_out: tuple[Codec, ...]
         if shard_shape_parsed is not None:
             index_location = None
@@ -4509,7 +4509,7 @@ def _parse_keep_array_attr(
             compressors = "auto"
     if serializer == "keep":
         if zarr_format == 3 and data.metadata.zarr_format == 3:
-            serializer = cast(SerializerLike, data.serializer)
+            serializer = cast("SerializerLike", data.serializer)
         else:
             serializer = "auto"
     if fill_value is None:
@@ -4687,7 +4687,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(filters, dict | Codec):
             maybe_array_array = (filters,)
         else:
-            maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
+            maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
         out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)
 
     if serializer == "auto":
@@ -4704,7 +4704,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(compressors, dict | Codec):
             maybe_bytes_bytes = (compressors,)
         else:
-            maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
+            maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)
 
         out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)
 
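
Every hunk above makes the same mechanical change: the first argument to typing.cast becomes a string literal. A likely motivation (not stated in the diff itself) is that string-form casts are never evaluated at runtime, so the names they reference only need to be resolvable by the type checker and can be imported under an `if TYPE_CHECKING:` block without runtime import cost or circular-import risk. Note that `from __future__ import annotations` does not help here, because it only defers annotation evaluation, not cast() arguments. A minimal, self-contained sketch of the pattern, using hypothetical names rather than anything from this codebase:

from __future__ import annotations

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Import used only by the type checker; the import statement never
    # executes at runtime because the cast() argument below stays a string.
    from collections.abc import Iterable


def as_int_tuple(values: object) -> tuple[int, ...]:
    # cast() returns its second argument unchanged at runtime; the quoted
    # type "Iterable[int]" is read only by static checkers such as mypy.
    items = cast("Iterable[int]", values)
    return tuple(items)


print(as_int_tuple([1, 2, 3]))  # -> (1, 2, 3)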