Skip to content

Blacken notebooks #5890

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Oct 24, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 10 additions & 7 deletions doc/examples/ERA5-GRIB-example.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
"metadata": {},
"outputs": [],
"source": [
"ds = xr.tutorial.load_dataset('era5-2mt-2019-03-uk.grib', engine='cfgrib')"
"ds = xr.tutorial.load_dataset(\"era5-2mt-2019-03-uk.grib\", engine=\"cfgrib\")"
]
},
{
Expand Down Expand Up @@ -72,11 +72,14 @@
"source": [
"import cartopy.crs as ccrs\n",
"import cartopy\n",
"fig = plt.figure(figsize=(10,10))\n",
"\n",
"fig = plt.figure(figsize=(10, 10))\n",
"ax = plt.axes(projection=ccrs.Robinson())\n",
"ax.coastlines(resolution='10m')\n",
"plot = ds.t2m[0].plot(cmap=plt.cm.coolwarm, transform=ccrs.PlateCarree(), cbar_kwargs={'shrink':0.6})\n",
"plt.title('ERA5 - 2m temperature British Isles March 2019')"
"ax.coastlines(resolution=\"10m\")\n",
"plot = ds.t2m[0].plot(\n",
" cmap=plt.cm.coolwarm, transform=ccrs.PlateCarree(), cbar_kwargs={\"shrink\": 0.6}\n",
")\n",
"plt.title(\"ERA5 - 2m temperature British Isles March 2019\")"
]
},
{
Expand All @@ -92,8 +95,8 @@
"metadata": {},
"outputs": [],
"source": [
"ds.t2m.sel(longitude=0,latitude=51.5).plot()\n",
"plt.title('ERA5 - London 2m temperature March 2019')"
"ds.t2m.sel(longitude=0, latitude=51.5).plot()\n",
"plt.title(\"ERA5 - London 2m temperature March 2019\")"
]
}
],
Expand Down
23 changes: 13 additions & 10 deletions doc/examples/ROMS_ocean_model.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"import cartopy.crs as ccrs\n",
"import cartopy.feature as cfeature\n",
"import matplotlib.pyplot as plt\n",
"\n",
"%matplotlib inline\n",
"\n",
"import xarray as xr"
Expand Down Expand Up @@ -73,9 +74,9 @@
"outputs": [],
"source": [
"# load in the file\n",
"ds = xr.tutorial.open_dataset('ROMS_example.nc', chunks={'ocean_time': 1})\n",
"ds = xr.tutorial.open_dataset(\"ROMS_example.nc\", chunks={\"ocean_time\": 1})\n",
"\n",
"# This is a way to turn on chunking and lazy evaluation. Opening with mfdataset, or \n",
"# This is a way to turn on chunking and lazy evaluation. Opening with mfdataset, or\n",
"# setting the chunking in the open_dataset would also achieve this.\n",
"ds"
]
Expand Down Expand Up @@ -105,12 +106,12 @@
"source": [
"if ds.Vtransform == 1:\n",
" Zo_rho = ds.hc * (ds.s_rho - ds.Cs_r) + ds.Cs_r * ds.h\n",
" z_rho = Zo_rho + ds.zeta * (1 + Zo_rho/ds.h)\n",
" z_rho = Zo_rho + ds.zeta * (1 + Zo_rho / ds.h)\n",
"elif ds.Vtransform == 2:\n",
" Zo_rho = (ds.hc * ds.s_rho + ds.Cs_r * ds.h) / (ds.hc + ds.h)\n",
" z_rho = ds.zeta + (ds.zeta + ds.h) * Zo_rho\n",
"\n",
"ds.coords['z_rho'] = z_rho.transpose() # needing transpose seems to be an xarray bug\n",
"ds.coords[\"z_rho\"] = z_rho.transpose() # needing transpose seems to be an xarray bug\n",
"ds.salt"
]
},
Expand Down Expand Up @@ -148,7 +149,7 @@
"outputs": [],
"source": [
"section = ds.salt.isel(xi_rho=50, eta_rho=slice(0, 167), ocean_time=0)\n",
"section.plot(x='lon_rho', y='z_rho', figsize=(15, 6), clim=(25, 35))\n",
"section.plot(x=\"lon_rho\", y=\"z_rho\", figsize=(15, 6), clim=(25, 35))\n",
"plt.ylim([-100, 1]);"
]
},
Expand All @@ -167,7 +168,7 @@
"metadata": {},
"outputs": [],
"source": [
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x='lon_rho', y='lat_rho')"
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x=\"lon_rho\", y=\"lat_rho\")"
]
},
{
Expand All @@ -186,11 +187,13 @@
"proj = ccrs.LambertConformal(central_longitude=-92, central_latitude=29)\n",
"fig = plt.figure(figsize=(15, 5))\n",
"ax = plt.axes(projection=proj)\n",
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(x='lon_rho', y='lat_rho', \n",
" transform=ccrs.PlateCarree())\n",
"ds.salt.isel(s_rho=-1, ocean_time=0).plot(\n",
" x=\"lon_rho\", y=\"lat_rho\", transform=ccrs.PlateCarree()\n",
")\n",
"\n",
"coast_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',\n",
" edgecolor='k', facecolor='0.8')\n",
"coast_10m = cfeature.NaturalEarthFeature(\n",
" \"physical\", \"land\", \"10m\", edgecolor=\"k\", facecolor=\"0.8\"\n",
")\n",
"ax.add_feature(coast_10m)"
]
},
Expand Down
4 changes: 3 additions & 1 deletion doc/examples/apply_ufunc_vectorize_1d.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -674,7 +674,9 @@
" exclude_dims=set((dim,)), # dimensions allowed to change size. Must be a set!\n",
" # vectorize=True, # not needed since numba takes care of vectorizing\n",
" dask=\"parallelized\",\n",
" output_dtypes=[data.dtype], # one per output; could also be float or np.dtype(\"float64\")\n",
" output_dtypes=[\n",
" data.dtype\n",
" ], # one per output; could also be float or np.dtype(\"float64\")\n",
" ).rename({\"__newdim__\": dim})\n",
" interped[dim] = newdim # need to add this manually\n",
"\n",
Expand Down
89 changes: 54 additions & 35 deletions doc/examples/monthly-means.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
"import numpy as np\n",
"import pandas as pd\n",
"import xarray as xr\n",
"import matplotlib.pyplot as plt "
"import matplotlib.pyplot as plt"
]
},
{
Expand All @@ -50,7 +50,7 @@
},
"outputs": [],
"source": [
"ds = xr.tutorial.open_dataset('rasm').load()\n",
"ds = xr.tutorial.open_dataset(\"rasm\").load()\n",
"ds"
]
},
Expand Down Expand Up @@ -88,13 +88,15 @@
"outputs": [],
"source": [
"# Calculate the weights by grouping by 'time.season'.\n",
"weights = month_length.groupby('time.season') / month_length.groupby('time.season').sum()\n",
"weights = (\n",
" month_length.groupby(\"time.season\") / month_length.groupby(\"time.season\").sum()\n",
")\n",
"\n",
"# Test that the sum of the weights for each season is 1.0\n",
"np.testing.assert_allclose(weights.groupby('time.season').sum().values, np.ones(4))\n",
"np.testing.assert_allclose(weights.groupby(\"time.season\").sum().values, np.ones(4))\n",
"\n",
"# Calculate the weighted average\n",
"ds_weighted = (ds * weights).groupby('time.season').sum(dim='time')"
"ds_weighted = (ds * weights).groupby(\"time.season\").sum(dim=\"time\")"
]
},
{
Expand Down Expand Up @@ -123,7 +125,7 @@
"outputs": [],
"source": [
"# only used for comparisons\n",
"ds_unweighted = ds.groupby('time.season').mean('time')\n",
"ds_unweighted = ds.groupby(\"time.season\").mean(\"time\")\n",
"ds_diff = ds_weighted - ds_unweighted"
]
},
Expand All @@ -139,39 +141,54 @@
"outputs": [],
"source": [
"# Quick plot to show the results\n",
"notnull = pd.notnull(ds_unweighted['Tair'][0])\n",
"\n",
"fig, axes = plt.subplots(nrows=4, ncols=3, figsize=(14,12))\n",
"for i, season in enumerate(('DJF', 'MAM', 'JJA', 'SON')):\n",
" ds_weighted['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 0], vmin=-30, vmax=30, cmap='Spectral_r', \n",
" add_colorbar=True, extend='both')\n",
" \n",
" ds_unweighted['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 1], vmin=-30, vmax=30, cmap='Spectral_r', \n",
" add_colorbar=True, extend='both')\n",
"\n",
" ds_diff['Tair'].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 2], vmin=-0.1, vmax=.1, cmap='RdBu_r',\n",
" add_colorbar=True, extend='both')\n",
"notnull = pd.notnull(ds_unweighted[\"Tair\"][0])\n",
"\n",
"fig, axes = plt.subplots(nrows=4, ncols=3, figsize=(14, 12))\n",
"for i, season in enumerate((\"DJF\", \"MAM\", \"JJA\", \"SON\")):\n",
" ds_weighted[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 0],\n",
" vmin=-30,\n",
" vmax=30,\n",
" cmap=\"Spectral_r\",\n",
" add_colorbar=True,\n",
" extend=\"both\",\n",
" )\n",
"\n",
" ds_unweighted[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 1],\n",
" vmin=-30,\n",
" vmax=30,\n",
" cmap=\"Spectral_r\",\n",
" add_colorbar=True,\n",
" extend=\"both\",\n",
" )\n",
"\n",
" ds_diff[\"Tair\"].sel(season=season).where(notnull).plot.pcolormesh(\n",
" ax=axes[i, 2],\n",
" vmin=-0.1,\n",
" vmax=0.1,\n",
" cmap=\"RdBu_r\",\n",
" add_colorbar=True,\n",
" extend=\"both\",\n",
" )\n",
"\n",
" axes[i, 0].set_ylabel(season)\n",
" axes[i, 1].set_ylabel('')\n",
" axes[i, 2].set_ylabel('')\n",
" axes[i, 1].set_ylabel(\"\")\n",
" axes[i, 2].set_ylabel(\"\")\n",
"\n",
"for ax in axes.flat:\n",
" ax.axes.get_xaxis().set_ticklabels([])\n",
" ax.axes.get_yaxis().set_ticklabels([])\n",
" ax.axes.axis('tight')\n",
" ax.set_xlabel('')\n",
" \n",
"axes[0, 0].set_title('Weighted by DPM')\n",
"axes[0, 1].set_title('Equal Weighting')\n",
"axes[0, 2].set_title('Difference')\n",
" \n",
" ax.axes.axis(\"tight\")\n",
" ax.set_xlabel(\"\")\n",
"\n",
"axes[0, 0].set_title(\"Weighted by DPM\")\n",
"axes[0, 1].set_title(\"Equal Weighting\")\n",
"axes[0, 2].set_title(\"Difference\")\n",
"\n",
"plt.tight_layout()\n",
"\n",
"fig.suptitle('Seasonal Surface Air Temperature', fontsize=16, y=1.02)"
"fig.suptitle(\"Seasonal Surface Air Temperature\", fontsize=16, y=1.02)"
]
},
{
Expand All @@ -186,18 +203,20 @@
"outputs": [],
"source": [
"# Wrap it into a simple function\n",
"def season_mean(ds, calendar='standard'):\n",
"def season_mean(ds, calendar=\"standard\"):\n",
" # Make a DataArray with the number of days in each month, size = len(time)\n",
" month_length = ds.time.dt.days_in_month\n",
"\n",
" # Calculate the weights by grouping by 'time.season'\n",
" weights = month_length.groupby('time.season') / month_length.groupby('time.season').sum()\n",
" weights = (\n",
" month_length.groupby(\"time.season\") / month_length.groupby(\"time.season\").sum()\n",
" )\n",
"\n",
" # Test that the sum of the weights for each season is 1.0\n",
" np.testing.assert_allclose(weights.groupby('time.season').sum().values, np.ones(4))\n",
" np.testing.assert_allclose(weights.groupby(\"time.season\").sum().values, np.ones(4))\n",
"\n",
" # Calculate the weighted average\n",
" return (ds * weights).groupby('time.season').sum(dim='time')"
" return (ds * weights).groupby(\"time.season\").sum(dim=\"time\")"
]
}
],
Expand Down
20 changes: 12 additions & 8 deletions doc/examples/multidimensional-coords.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
},
"outputs": [],
"source": [
"ds = xr.tutorial.open_dataset('rasm').load()\n",
"ds = xr.tutorial.open_dataset(\"rasm\").load()\n",
"ds"
]
},
Expand Down Expand Up @@ -94,7 +94,7 @@
},
"outputs": [],
"source": [
"fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(14,4))\n",
"fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(14, 4))\n",
"ds.xc.plot(ax=ax1)\n",
"ds.yc.plot(ax=ax2)"
]
Expand Down Expand Up @@ -140,12 +140,14 @@
},
"outputs": [],
"source": [
"plt.figure(figsize=(14,6))\n",
"plt.figure(figsize=(14, 6))\n",
"ax = plt.axes(projection=ccrs.PlateCarree())\n",
"ax.set_global()\n",
"ds.Tair[0].plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree(), x='xc', y='yc', add_colorbar=False)\n",
"ds.Tair[0].plot.pcolormesh(\n",
" ax=ax, transform=ccrs.PlateCarree(), x=\"xc\", y=\"yc\", add_colorbar=False\n",
")\n",
"ax.coastlines()\n",
"ax.set_ylim([0,90]);"
"ax.set_ylim([0, 90]);"
]
},
{
Expand All @@ -169,11 +171,13 @@
"outputs": [],
"source": [
"# define two-degree wide latitude bins\n",
"lat_bins = np.arange(0,91,2)\n",
"lat_bins = np.arange(0, 91, 2)\n",
"# define a label for each bin corresponding to the central latitude\n",
"lat_center = np.arange(1,90,2)\n",
"lat_center = np.arange(1, 90, 2)\n",
"# group according to those bins and take the mean\n",
"Tair_lat_mean = ds.Tair.groupby_bins('xc', lat_bins, labels=lat_center).mean(dim=xr.ALL_DIMS)\n",
"Tair_lat_mean = ds.Tair.groupby_bins(\"xc\", lat_bins, labels=lat_center).mean(\n",
" dim=xr.ALL_DIMS\n",
")\n",
"# plot the result\n",
"Tair_lat_mean.plot()"
]
Expand Down
Loading