Skip to content

Commit a2bf42c

Browse files
max-sixtysnowman2
authored and committed
Blacken notebooks (pydata#5890)
1 parent f700dd8 commit a2bf42c

File tree

6 files changed

+138
-95
lines changed

6 files changed

+138
-95
lines changed

doc/examples/ERA5-GRIB-example.ipynb

Lines changed: 10 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,7 @@
3737
"metadata": {},
3838
"outputs": [],
3939
"source": [
40-
"ds = xr.tutorial.load_dataset('era5-2mt-2019-03-uk.grib', engine='cfgrib')"
40+
"ds = xr.tutorial.load_dataset(\"era5-2mt-2019-03-uk.grib\", engine=\"cfgrib\")"
4141
]
4242
},
4343
{
@@ -72,11 +72,14 @@
7272
"source": [
7373
"import cartopy.crs as ccrs\n",
7474
"import cartopy\n",
75-
"fig = plt.figure(figsize=(10,10))\n",
75+
"\n",
76+
"fig = plt.figure(figsize=(10, 10))\n",
7677
"ax = plt.axes(projection=ccrs.Robinson())\n",
77-
"ax.coastlines(resolution='10m')\n",
78-
"plot = ds.t2m[0].plot(cmap=plt.cm.coolwarm, transform=ccrs.PlateCarree(), cbar_kwargs={'shrink':0.6})\n",
79-
"plt.title('ERA5 - 2m temperature British Isles March 2019')"
78+
"ax.coastlines(resolution=\"10m\")\n",
79+
"plot = ds.t2m[0].plot(\n",
80+
" cmap=plt.cm.coolwarm, transform=ccrs.PlateCarree(), cbar_kwargs={\"shrink\": 0.6}\n",
81+
")\n",
82+
"plt.title(\"ERA5 - 2m temperature British Isles March 2019\")"
8083
]
8184
},
8285
{
@@ -92,8 +95,8 @@
9295
"metadata": {},
9396
"outputs": [],
9497
"source": [
95-
"ds.t2m.sel(longitude=0,latitude=51.5).plot()\n",
96-
"plt.title('ERA5 - London 2m temperature March 2019')"
98+
"ds.t2m.sel(longitude=0, latitude=51.5).plot()\n",
99+
"plt.title(\"ERA5 - London 2m temperature March 2019\")"
97100
]
98101
}
99102
],

doc/examples/ROMS_ocean_model.ipynb

Lines changed: 13 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -26,6 +26,7 @@
2626
"import cartopy.crs as ccrs\n",
2727
"import cartopy.feature as cfeature\n",
2828
"import matplotlib.pyplot as plt\n",
29+
"\n",
2930
"%matplotlib inline\n",
3031
"\n",
3132
"import xarray as xr"
@@ -73,9 +74,9 @@
7374
"outputs": [],
7475
"source": [
7576
"# load in the file\n",
76-
"ds = xr.tutorial.open_dataset('ROMS_example.nc', chunks={'ocean_time': 1})\n",
77+
"ds = xr.tutorial.open_dataset(\"ROMS_example.nc\", chunks={\"ocean_time\": 1})\n",
7778
"\n",
78-
"# This is a way to turn on chunking and lazy evaluation. Opening with mfdataset, or \n",
79+
"# This is a way to turn on chunking and lazy evaluation. Opening with mfdataset, or\n",
7980
"# setting the chunking in the open_dataset would also achive this.\n",
8081
"ds"
8182
]
@@ -105,12 +106,12 @@
105106
"source": [
106107
"if ds.Vtransform == 1:\n",
107108
" Zo_rho = ds.hc * (ds.s_rho - ds.Cs_r) + ds.Cs_r * ds.h\n",
108-
" z_rho = Zo_rho + ds.zeta * (1 + Zo_rho/ds.h)\n",
109+
" z_rho = Zo_rho + ds.zeta * (1 + Zo_rho / ds.h)\n",
109110
"elif ds.Vtransform == 2:\n",
110111
" Zo_rho = (ds.hc * ds.s_rho + ds.Cs_r * ds.h) / (ds.hc + ds.h)\n",
111112
" z_rho = ds.zeta + (ds.zeta + ds.h) * Zo_rho\n",
112113
"\n",
113-
"ds.coords['z_rho'] = z_rho.transpose() # needing transpose seems to be an xarray bug\n",
114+
"ds.coords[\"z_rho\"] = z_rho.transpose() # needing transpose seems to be an xarray bug\n",
114115
"ds.salt"
115116
]
116117
},
@@ -148,7 +149,7 @@
148149
"outputs": [],
149150
"source": [
150151
"section = ds.salt.isel(xi_rho=50, eta_rho=slice(0, 167), ocean_time=0)\n",
151-
"section.plot(x='lon_rho', y='z_rho', figsize=(15, 6), clim=(25, 35))\n",
152+
"section.plot(x=\"lon_rho\", y=\"z_rho\", figsize=(15, 6), clim=(25, 35))\n",
152153
"plt.ylim([-100, 1]);"
153154
]
154155
},
@@ -167,7 +168,7 @@
167168
"metadata": {},
168169
"outputs": [],
169170
"source": [
170-
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x='lon_rho', y='lat_rho')"
171+
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x=\"lon_rho\", y=\"lat_rho\")"
171172
]
172173
},
173174
{
@@ -186,11 +187,13 @@
186187
"proj = ccrs.LambertConformal(central_longitude=-92, central_latitude=29)\n",
187188
"fig = plt.figure(figsize=(15, 5))\n",
188189
"ax = plt.axes(projection=proj)\n",
189-
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x='lon_rho', y='lat_rho', \n",
190-
" transform=ccrs.PlateCarree())\n",
190+
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(\n",
191+
" x=\"lon_rho\", y=\"lat_rho\", transform=ccrs.PlateCarree()\n",
192+
")\n",
191193
"\n",
192-
"coast_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',\n",
193-
" edgecolor='k', facecolor='0.8')\n",
194+
"coast_10m = cfeature.NaturalEarthFeature(\n",
195+
" \"physical\", \"land\", \"10m\", edgecolor=\"k\", facecolor=\"0.8\"\n",
196+
")\n",
194197
"ax.add_feature(coast_10m)"
195198
]
196199
},

doc/examples/apply_ufunc_vectorize_1d.ipynb

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -674,7 +674,9 @@
674674
" exclude_dims=set((dim,)), # dimensions allowed to change size. Must be a set!\n",
675675
" # vectorize=True, # not needed since numba takes care of vectorizing\n",
676676
" dask=\"parallelized\",\n",
677-
" output_dtypes=[data.dtype], # one per output; could also be float or np.dtype(\"float64\")\n",
677+
" output_dtypes=[\n",
678+
" data.dtype\n",
679+
" ], # one per output; could also be float or np.dtype(\"float64\")\n",
678680
" ).rename({\"__newdim__\": dim})\n",
679681
" interped[dim] = newdim # need to add this manually\n",
680682
"\n",

doc/examples/monthly-means.ipynb

Lines changed: 54 additions & 35 deletions
Original file line number | Diff line number | Diff line change
@@ -29,7 +29,7 @@
2929
"import numpy as np\n",
3030
"import pandas as pd\n",
3131
"import xarray as xr\n",
32-
"import matplotlib.pyplot as plt "
32+
"import matplotlib.pyplot as plt"
3333
]
3434
},
3535
{
@@ -50,7 +50,7 @@
5050
},
5151
"outputs": [],
5252
"source": [
53-
"ds = xr.tutorial.open_dataset('rasm').load()\n",
53+
"ds = xr.tutorial.open_dataset(\"rasm\").load()\n",
5454
"ds"
5555
]
5656
},
@@ -88,13 +88,15 @@
8888
"outputs": [],
8989
"source": [
9090
"# Calculate the weights by grouping by 'time.season'.\n",
91-
"weights = month_length.groupby('time.season') / month_length.groupby('time.season').sum()\n",
91+
"weights = (\n",
92+
" month_length.groupby(\"time.season\") / month_length.groupby(\"time.season\").sum()\n",
93+
")\n",
9294
"\n",
9395
"# Test that the sum of the weights for each season is 1.0\n",
94-
"np.testing.assert_allclose(weights.groupby('time.season').sum().values, np.ones(4))\n",
96+
"np.testing.assert_allclose(weights.groupby(\"time.season\").sum().values, np.ones(4))\n",
9597
"\n",
9698
"# Calculate the weighted average\n",
97-
"ds_weighted = (ds * weights).groupby('time.season').sum(dim='time')"
99+
"ds_weighted = (ds * weights).groupby(\"time.season\").sum(dim=\"time\")"
98100
]
99101
},
100102
{
@@ -123,7 +125,7 @@
123125
"outputs": [],
124126
"source": [
125127
"# only used for comparisons\n",
126-
"ds_unweighted = ds.groupby('time.season').mean('time')\n",
128+
"ds_unweighted = ds.groupby(\"time.season\").mean(\"time\")\n",
127129
"ds_diff = ds_weighted - ds_unweighted"
128130
]
129131
},
@@ -139,39 +141,54 @@
139141
"outputs": [],
140142
"source": [
141143
"# Quick plot to show the results\n",
142-
"notnull = pd.notnull(ds_unweighted['Tair'][0])\n",
143-
"\n",
144-
"fig, axes = plt.subplots(nrows=4, ncols=3, figsize=(14,12))\n",
145-
"for i, season in enumerate(('DJF', 'MAM', 'JJA', 'SON')):\n",
146-
" ds_weighted['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
147-
" ax=axes[i, 0], vmin=-30, vmax=30, cmap='Spectral_r', \n",
148-
" add_colorbar=True, extend='both')\n",
149-
" \n",
150-
" ds_unweighted['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
151-
" ax=axes[i, 1], vmin=-30, vmax=30, cmap='Spectral_r', \n",
152-
" add_colorbar=True, extend='both')\n",
153-
"\n",
154-
" ds_diff['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
155-
" ax=axes[i, 2], vmin=-0.1, vmax=.1, cmap='RdBu_r',\n",
156-
" add_colorbar=True, extend='both')\n",
144+
"notnull = pd.notnull(ds_unweighted[\"Tair\"][0])\n",
145+
"\n",
146+
"fig, axes = plt.subplots(nrows=4, ncols=3, figsize=(14, 12))\n",
147+
"for i, season in enumerate((\"DJF\", \"MAM\", \"JJA\", \"SON\")):\n",
148+
" ds_weighted[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
149+
" ax=axes[i, 0],\n",
150+
" vmin=-30,\n",
151+
" vmax=30,\n",
152+
" cmap=\"Spectral_r\",\n",
153+
" add_colorbar=True,\n",
154+
" extend=\"both\",\n",
155+
" )\n",
156+
"\n",
157+
" ds_unweighted[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
158+
" ax=axes[i, 1],\n",
159+
" vmin=-30,\n",
160+
" vmax=30,\n",
161+
" cmap=\"Spectral_r\",\n",
162+
" add_colorbar=True,\n",
163+
" extend=\"both\",\n",
164+
" )\n",
165+
"\n",
166+
" ds_diff[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
167+
" ax=axes[i, 2],\n",
168+
" vmin=-0.1,\n",
169+
" vmax=0.1,\n",
170+
" cmap=\"RdBu_r\",\n",
171+
" add_colorbar=True,\n",
172+
" extend=\"both\",\n",
173+
" )\n",
157174
"\n",
158175
" axes[i, 0].set_ylabel(season)\n",
159-
" axes[i, 1].set_ylabel('')\n",
160-
" axes[i, 2].set_ylabel('')\n",
176+
" axes[i, 1].set_ylabel(\"\")\n",
177+
" axes[i, 2].set_ylabel(\"\")\n",
161178
"\n",
162179
"for ax in axes.flat:\n",
163180
" ax.axes.get_xaxis().set_ticklabels([])\n",
164181
" ax.axes.get_yaxis().set_ticklabels([])\n",
165-
" ax.axes.axis('tight')\n",
166-
" ax.set_xlabel('')\n",
167-
" \n",
168-
"axes[0, 0].set_title('Weighted by DPM')\n",
169-
"axes[0, 1].set_title('Equal Weighting')\n",
170-
"axes[0, 2].set_title('Difference')\n",
171-
" \n",
182+
" ax.axes.axis(\"tight\")\n",
183+
" ax.set_xlabel(\"\")\n",
184+
"\n",
185+
"axes[0, 0].set_title(\"Weighted by DPM\")\n",
186+
"axes[0, 1].set_title(\"Equal Weighting\")\n",
187+
"axes[0, 2].set_title(\"Difference\")\n",
188+
"\n",
172189
"plt.tight_layout()\n",
173190
"\n",
174-
"fig.suptitle('Seasonal Surface Air Temperature', fontsize=16, y=1.02)"
191+
"fig.suptitle(\"Seasonal Surface Air Temperature\", fontsize=16, y=1.02)"
175192
]
176193
},
177194
{
@@ -186,18 +203,20 @@
186203
"outputs": [],
187204
"source": [
188205
"# Wrap it into a simple function\n",
189-
"def season_mean(ds, calendar='standard'):\n",
206+
"def season_mean(ds, calendar=\"standard\"):\n",
190207
" # Make a DataArray with the number of days in each month, size = len(time)\n",
191208
" month_length = ds.time.dt.days_in_month\n",
192209
"\n",
193210
" # Calculate the weights by grouping by 'time.season'\n",
194-
" weights = month_length.groupby('time.season') / month_length.groupby('time.season').sum()\n",
211+
" weights = (\n",
212+
" month_length.groupby(\"time.season\") / month_length.groupby(\"time.season\").sum()\n",
213+
" )\n",
195214
"\n",
196215
" # Test that the sum of the weights for each season is 1.0\n",
197-
" np.testing.assert_allclose(weights.groupby('time.season').sum().values, np.ones(4))\n",
216+
" np.testing.assert_allclose(weights.groupby(\"time.season\").sum().values, np.ones(4))\n",
198217
"\n",
199218
" # Calculate the weighted average\n",
200-
" return (ds * weights).groupby('time.season').sum(dim='time')"
219+
" return (ds * weights).groupby(\"time.season\").sum(dim=\"time\")"
201220
]
202221
}
203222
],

doc/examples/multidimensional-coords.ipynb

Lines changed: 12 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -48,7 +48,7 @@
4848
},
4949
"outputs": [],
5050
"source": [
51-
"ds = xr.tutorial.open_dataset('rasm').load()\n",
51+
"ds = xr.tutorial.open_dataset(\"rasm\").load()\n",
5252
"ds"
5353
]
5454
},
@@ -94,7 +94,7 @@
9494
},
9595
"outputs": [],
9696
"source": [
97-
"fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(14,4))\n",
97+
"fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(14, 4))\n",
9898
"ds.xc.plot(ax=ax1)\n",
9999
"ds.yc.plot(ax=ax2)"
100100
]
@@ -140,12 +140,14 @@
140140
},
141141
"outputs": [],
142142
"source": [
143-
"plt.figure(figsize=(14,6))\n",
143+
"plt.figure(figsize=(14, 6))\n",
144144
"ax = plt.axes(projection=ccrs.PlateCarree())\n",
145145
"ax.set_global()\n",
146-
"ds.Tair[0].plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree(), x='xc', y='yc', add_colorbar=False)\n",
146+
"ds.Tair[0].plot.pcolormesh(\n",
147+
" ax=ax, transform=ccrs.PlateCarree(), x=\"xc\", y=\"yc\", add_colorbar=False\n",
148+
")\n",
147149
"ax.coastlines()\n",
148-
"ax.set_ylim([0,90]);"
150+
"ax.set_ylim([0, 90]);"
149151
]
150152
},
151153
{
@@ -169,11 +171,13 @@
169171
"outputs": [],
170172
"source": [
171173
"# define two-degree wide latitude bins\n",
172-
"lat_bins = np.arange(0,91,2)\n",
174+
"lat_bins = np.arange(0, 91, 2)\n",
173175
"# define a label for each bin corresponding to the central latitude\n",
174-
"lat_center = np.arange(1,90,2)\n",
176+
"lat_center = np.arange(1, 90, 2)\n",
175177
"# group according to those bins and take the mean\n",
176-
"Tair_lat_mean = ds.Tair.groupby_bins('xc', lat_bins, labels=lat_center).mean(dim=xr.ALL_DIMS)\n",
178+
"Tair_lat_mean = ds.Tair.groupby_bins(\"xc\", lat_bins, labels=lat_center).mean(\n",
179+
" dim=xr.ALL_DIMS\n",
180+
")\n",
177181
"# plot the result\n",
178182
"Tair_lat_mean.plot()"
179183
]

0 commit comments

Comments (0)