Skip to content

Commit 7d6a3f6

Browse files
Enable type hint checking (#5956)
* Enable type hint checking * Add whatsnew entry * Add class variable annotation * Update line numbers in doctests * Implement suggestions from code review * Update docs/src/whatsnew/latest.rst Co-authored-by: Martin Yeo <[email protected]> * Workaround for python/mypy#1465 Co-authored-by: Martin Yeo <[email protected]> * Also apply workaround for bounds * Add type hint --------- Co-authored-by: Martin Yeo <[email protected]>
1 parent 1bea71c commit 7d6a3f6

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

51 files changed

+269
-189
lines changed

.pre-commit-config.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,14 @@ repos:
6262
- id: sort-all
6363
types: [file, python]
6464

65+
- repo: https://github.com/pre-commit/mirrors-mypy
66+
rev: 'v1.9.0'
67+
hooks:
68+
- id: mypy
69+
additional_dependencies:
70+
- 'types-requests'
71+
exclude: 'noxfile\.py|docs/src/conf\.py'
72+
6573
- repo: https://github.com/numpy/numpydoc
6674
rev: v1.7.0
6775
hooks:

benchmarks/asv_delegated_conda.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from pathlib import Path
1414
from shutil import copy2, copytree, rmtree
1515
from tempfile import TemporaryDirectory
16+
from typing import Callable
1617

1718
from asv import util as asv_util
1819
from asv.config import Config
@@ -99,6 +100,7 @@ def name(self):
99100
def _update_info(self) -> None:
100101
"""Make sure class properties reflect the actual environment being used."""
101102
# Follow symlink if it has been created.
103+
self._path: str
102104
actual_path = Path(self._path).resolve()
103105
self._path = str(actual_path)
104106

@@ -132,7 +134,7 @@ def copy_asv_files(src_parent: Path, dst_parent: Path) -> None:
132134
# happened. Also a non-issue when copying in the reverse
133135
# direction because the cache dir is temporary.
134136
if src_path.is_dir():
135-
func = copytree
137+
func: Callable = copytree
136138
else:
137139
func = copy2
138140
func(src_path, dst_path)

benchmarks/benchmarks/cube.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def time_create(self, _, cube_creation_strategy: str) -> None:
5555
new_cube.attributes = self.cube_kwargs["attributes"]
5656
new_cube.cell_methods = self.cube_kwargs["cell_methods"]
5757
for coord, dims in self.cube_kwargs["dim_coords_and_dims"]:
58-
coord: coords.DimCoord # Type hint to help linters.
58+
assert isinstance(coord, coords.DimCoord) # Type hint to help linters.
5959
new_cube.add_dim_coord(coord, dims)
6060
for coord, dims in self.cube_kwargs["aux_coords_and_dims"]:
6161
new_cube.add_aux_coord(coord, dims)

benchmarks/benchmarks/experimental/ugrid/regions_combine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -210,7 +210,7 @@ def track_filesize_saved(self, n_cubesphere):
210210
return os.path.getsize("tmp.nc") * 1.0e-6
211211

212212

213-
CombineRegionsSaveData.track_filesize_saved.unit = "Mb"
213+
CombineRegionsSaveData.track_filesize_saved.unit = "Mb" # type: ignore[attr-defined]
214214

215215

216216
class CombineRegionsFileStreamedCalc(MixinCombineRegions):

benchmarks/benchmarks/load/__init__.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,16 +15,16 @@
1515
class LoadAndRealise:
1616
# For data generation
1717
timeout = 600.0
18-
params = [
18+
params = (
1919
[(50, 50, 2), (1280, 960, 5), (2, 2, 1000)],
2020
[False, True],
2121
["FF", "PP", "NetCDF"],
22-
]
22+
)
2323
param_names = ["xyz", "compressed", "file_format"]
2424

2525
def setup_cache(self) -> dict:
2626
file_type_args = self.params[2]
27-
file_path_dict = {}
27+
file_path_dict: dict[tuple[int, int, int], dict[bool, dict[str, str]]] = {}
2828
for xyz in self.params[0]:
2929
file_path_dict[xyz] = {}
3030
x, y, z = xyz
@@ -59,7 +59,7 @@ def time_realise(self, _, __, ___, ____) -> None:
5959

6060
class STASHConstraint:
6161
# xyz sizes mimic LoadAndRealise to maximise file reuse.
62-
params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]]
62+
params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"])
6363
param_names = ["xyz", "file_format"]
6464

6565
def setup_cache(self) -> dict:
@@ -78,7 +78,7 @@ def time_stash_constraint(self, _, __, ___) -> None:
7878

7979

8080
class TimeConstraint:
81-
params = [[3, 20], ["FF", "PP", "NetCDF"]]
81+
params = ([3, 20], ["FF", "PP", "NetCDF"])
8282
param_names = ["time_dim_len", "file_format"]
8383

8484
def setup_cache(self) -> dict:
@@ -139,7 +139,7 @@ class StructuredFF:
139139
avoiding the cost of merging.
140140
"""
141141

142-
params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]]
142+
params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True])
143143
param_names = ["xyz", "structured_loading"]
144144

145145
def setup_cache(self) -> dict:

benchmarks/bm_runner.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ def _setup_common() -> None:
136136

137137
def _asv_compare(*commits: str, overnight_mode: bool = False) -> None:
138138
"""Run through a list of commits comparing each one to the next."""
139-
commits = [commit[:8] for commit in commits]
139+
commits = tuple(commit[:8] for commit in commits)
140140
for i in range(len(commits) - 1):
141141
before = commits[i]
142142
after = commits[i + 1]
@@ -235,19 +235,19 @@ def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str)
235235

236236
for login_type in ("author", "mergedBy"):
237237
gh_query = f'.["{login_type}"]["login"]'
238-
command = shlex.split(
238+
commandlist = shlex.split(
239239
f"gh pr view {pr_tag[1:]} "
240240
f"--json {login_type} -q '{gh_query}' "
241241
f"--repo {repo}"
242242
)
243-
login = _subprocess_runner_capture(command)
243+
login = _subprocess_runner_capture(commandlist)
244244

245-
command = [
245+
commandlist = [
246246
"curl",
247247
"-s",
248248
f"https://api.github.com/users/{login}",
249249
]
250-
login_info = _subprocess_runner_capture(command)
250+
login_info = _subprocess_runner_capture(commandlist)
251251
is_user = '"type": "User"' in login_info
252252
if is_user:
253253
assignee = login
@@ -313,7 +313,7 @@ class _SubParserGenerator(ABC):
313313
description: str = NotImplemented
314314
epilog: str = NotImplemented
315315

316-
def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None:
316+
def __init__(self, subparsers: argparse._SubParsersAction[ArgumentParser]) -> None:
317317
self.subparser: ArgumentParser = subparsers.add_parser(
318318
self.name,
319319
description=self.description,
@@ -476,10 +476,12 @@ def csperf(args: argparse.Namespace, run_type: Literal["cperf", "sperf"]) -> Non
476476
environ["ON_DEMAND_BENCHMARKS"] = "True"
477477
commit_range = "upstream/main^!"
478478

479-
asv_command = ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}"
479+
asv_command_str = (
480+
ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}"
481+
)
480482

481483
# Only do a single round.
482-
asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command))
484+
asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command_str))
483485
try:
484486
_subprocess_runner([*asv_command, *args.asv_args], asv=True)
485487
except subprocess.CalledProcessError as err:
@@ -584,7 +586,7 @@ def func(args: argparse.Namespace) -> None:
584586
environ["DATA_GEN_PYTHON"] = str(python_path)
585587
_setup_common()
586588
# get path of data-gen environment, setup by previous call
587-
python_path = environ["DATA_GEN_PYTHON"]
589+
python_path = Path(environ["DATA_GEN_PYTHON"])
588590
# allow 'on-demand' benchmarks
589591
environ["ON_DEMAND_BENCHMARKS"] = "1"
590592
asv_command = [

docs/src/further_topics/filtering_warnings.rst

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -47,9 +47,9 @@ Warnings:
4747

4848
>>> my_operation()
4949
...
50-
iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
50+
iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
5151
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)
52-
iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
52+
iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
5353
warnings.warn(
5454

5555
Warnings can be suppressed using the Python warnings filter with the ``ignore``
@@ -110,7 +110,7 @@ You can target specific Warning messages, e.g.
110110
... warnings.filterwarnings("ignore", message="Discarding false_easting")
111111
... my_operation()
112112
...
113-
iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
113+
iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
114114
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)
115115

116116
::
@@ -125,16 +125,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g.
125125
.. doctest:: filtering_warnings
126126

127127
>>> with warnings.catch_warnings():
128-
... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=444)
128+
... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=445)
129129
... my_operation()
130130
...
131-
iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
131+
iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
132132
warnings.warn(
133133

134134
::
135135

136-
python -W ignore:::iris.coord_systems:444
137-
export PYTHONWARNINGS=ignore:::iris.coord_systems:444
136+
python -W ignore:::iris.coord_systems:445
137+
export PYTHONWARNINGS=ignore:::iris.coord_systems:445
138138

139139
Warnings from a Common Source
140140
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -188,7 +188,7 @@ module during execution:
188188
... )
189189
... my_operation()
190190
...
191-
iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
191+
iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
192192
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)
193193

194194
----

docs/src/whatsnew/latest.rst

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -145,6 +145,9 @@ This document explains the changes made to Iris for this release
145145

146146
#. `@rcomer`_ made some :meth:`~iris.cube.Cube.slices_over` tests go faster (:pull:`5973`)
147147

148+
#. `@bouweandela`_ enabled mypy checks for type hints.
149+
The entire team would like to thank Bouwe for putting in the hard
150+
work on an unglamorous but highly valuable contribution. (:pull:`5956`)
148151

149152
.. comment
150153
Whatsnew author names (@github name) in alphabetical order. Note that,

lib/iris/__init__.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,7 @@ def callback(cube, field, filename):
9494
import itertools
9595
import os.path
9696
import threading
97+
from typing import Callable, Literal
9798

9899
import iris._constraints
99100
import iris.config
@@ -189,7 +190,7 @@ def __repr__(self):
189190
return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs)
190191

191192
# deprecated_options = {'example_future_flag': 'warning',}
192-
deprecated_options = {}
193+
deprecated_options: dict[str, Literal["error", "warning"]] = {}
193194

194195
def __setattr__(self, name, value):
195196
if name in self.deprecated_options:
@@ -248,7 +249,10 @@ def context(self, **kwargs):
248249

249250
# Initialise the site configuration dictionary.
250251
#: Iris site configuration dictionary.
251-
site_configuration = {}
252+
site_configuration: dict[
253+
Literal["cf_profile", "cf_patch", "cf_patch_conventions"],
254+
Callable | Literal[False] | None,
255+
] = {}
252256

253257
try:
254258
from iris.site_config import update as _update

lib/iris/_lazy_data.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
"""
1010

1111
from functools import lru_cache, wraps
12+
from types import ModuleType
1213
from typing import Sequence
1314

1415
import dask
@@ -376,7 +377,7 @@ def _combine(
376377
lazy = any(is_lazy_data(a) for a in arrays)
377378
masked = any(is_masked_data(a) for a in arrays)
378379

379-
array_module = np
380+
array_module: ModuleType = np
380381
if masked:
381382
if lazy:
382383
# Avoid inconsistent array type when slicing resulting array

lib/iris/analysis/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1848,7 +1848,7 @@ def interp_order(length):
18481848
and lazy data.
18491849
18501850
"""
1851-
MAX_RUN.name = lambda: "max_run"
1851+
MAX_RUN.name = lambda: "max_run" # type: ignore[method-assign]
18521852

18531853

18541854
GMEAN = Aggregator("geometric_mean", scipy.stats.mstats.gmean)

lib/iris/analysis/maths.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -558,7 +558,7 @@ def exponentiate(cube, exponent, in_place=False):
558558
)
559559
if cube.has_lazy_data():
560560

561-
def power(data):
561+
def power(data, out=None):
562562
return operator.pow(data, exponent)
563563

564564
else:

lib/iris/common/metadata.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ class BaseMetadata(metaclass=_NamedTupleMeta):
141141

142142
DEFAULT_NAME = "unknown" # the fall-back name for metadata identity
143143

144-
_members = (
144+
_members: str | Iterable[str] = (
145145
"standard_name",
146146
"long_name",
147147
"var_name",
@@ -870,7 +870,7 @@ def equal(self, other, lenient=None):
870870
class CoordMetadata(BaseMetadata):
871871
"""Metadata container for a :class:`~iris.coords.Coord`."""
872872

873-
_members = ("coord_system", "climatological")
873+
_members: str | Iterable[str] = ("coord_system", "climatological")
874874

875875
__slots__ = ()
876876

lib/iris/common/resolve.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131

3232

3333
_AuxCoverage = namedtuple(
34-
"AuxCoverage",
34+
"_AuxCoverage",
3535
[
3636
"cube",
3737
"common_items_aux",
@@ -45,18 +45,18 @@
4545
)
4646

4747
_CategoryItems = namedtuple(
48-
"CategoryItems",
48+
"_CategoryItems",
4949
["items_dim", "items_aux", "items_scalar"],
5050
)
5151

5252
_DimCoverage = namedtuple(
53-
"DimCoverage",
53+
"_DimCoverage",
5454
["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"],
5555
)
5656

57-
_Item = namedtuple("Item", ["metadata", "coord", "dims"])
57+
_Item = namedtuple("_Item", ["metadata", "coord", "dims"])
5858

59-
_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"])
59+
_PreparedFactory = namedtuple("_PreparedFactory", ["container", "dependencies"])
6060

6161

6262
@dataclass
@@ -95,7 +95,7 @@ def create_coord(self, metadata):
9595
return result
9696

9797

98-
_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"])
98+
_PreparedMetadata = namedtuple("_PreparedMetadata", ["combined", "src", "tgt"])
9999

100100

101101
class Resolve:

lib/iris/coord_systems.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from abc import ABCMeta, abstractmethod
88
from functools import cached_property
99
import re
10+
from typing import ClassVar
1011
import warnings
1112

1213
import cartopy.crs as ccrs
@@ -48,7 +49,7 @@ def _float_or_None(arg):
4849
class CoordSystem(metaclass=ABCMeta):
4950
"""Abstract base class for coordinate systems."""
5051

51-
grid_mapping_name = None
52+
grid_mapping_name: ClassVar[str | None] = None
5253

5354
def __eq__(self, other):
5455
"""Override equality.

lib/iris/coords.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2704,7 +2704,12 @@ def _new_points_requirements(self, points):
27042704
emsg = "The {!r} {} points array must be strictly monotonic."
27052705
raise ValueError(emsg.format(self.name(), self.__class__.__name__))
27062706

2707-
@Coord._values.setter
2707+
@property
2708+
def _values(self):
2709+
# Overridden just to allow .setter override.
2710+
return super()._values
2711+
2712+
@_values.setter
27082713
def _values(self, points):
27092714
# DimCoord always realises the points, to allow monotonicity checks.
27102715
# Ensure it is an actual array, and also make our own copy so that we
@@ -2796,7 +2801,12 @@ def _new_bounds_requirements(self, bounds):
27962801

27972802
return bounds
27982803

2799-
@Coord.bounds.setter
2804+
@property
2805+
def bounds(self):
2806+
# Overridden just to allow .setter override.
2807+
return super().bounds
2808+
2809+
@bounds.setter
28002810
def bounds(self, bounds):
28012811
if bounds is not None:
28022812
# Ensure we have a realised array of new bounds values.

0 commit comments

Comments (0)