Skip to content

Commit 0ed9e1a

Browse files
committed
Change read_psm3 to parse_psm3, remove ability to read files, add small example of reading files
1 parent 3d8faf3 commit 0ed9e1a

File tree

5 files changed

+56
-66
lines changed

5 files changed

+56
-66
lines changed

docs/sphinx/source/api.rst

+1-1
Original file line numberDiff line numberDiff line change
@@ -369,7 +369,7 @@ relevant to solar energy modeling.
369369
iotools.read_crn
370370
iotools.read_solrad
371371
iotools.get_psm3
372-
iotools.read_psm3
372+
iotools.parse_psm3
373373

374374
A :py:class:`~pvlib.location.Location` object may be created from metadata
375375
in some files.

docs/sphinx/source/whatsnew/v0.7.1.rst

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,8 @@ API Changes
99

1010
Enhancements
1111
~~~~~~~~~~~~
12-
* Added :py:func:`~pvlib.iotools.read_psm3` to read local NSRDB PSM3 files.
13-
(:issue:`841`)
12+
* Added :py:func:`~pvlib.iotools.parse_psm3` to parse local NSRDB PSM3
13+
file-like objects. (:issue:`841`)
1414
* Added `leap_day` parameter to `iotools.get_psm3` instead of hardcoding it as
1515
False.
1616

@@ -22,7 +22,7 @@ Bug fixes
2222
Testing
2323
~~~~~~~
2424
* Added single-year PSM3 API test for `iotools.get_psm3`.
25-
* Added tests for `iotools.read_psm3`.
25+
* Added tests for `iotools.parse_psm3`.
2626

2727
Documentation
2828
~~~~~~~~~~~~~

pvlib/iotools/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,4 +11,4 @@
1111
from pvlib.iotools.crn import read_crn # noqa: F401
1212
from pvlib.iotools.solrad import read_solrad # noqa: F401
1313
from pvlib.iotools.psm3 import get_psm3 # noqa: F401
14-
from pvlib.iotools.psm3 import read_psm3 # noqa: F401
14+
from pvlib.iotools.psm3 import parse_psm3 # noqa: F401

pvlib/iotools/psm3.py

+43-49
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60,
9191
9292
See Also
9393
--------
94-
pvlib.iotools.read_psm3
94+
pvlib.iotools.parse_psm3
9595
9696
References
9797
----------
@@ -142,17 +142,17 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60,
142142
# the CSV is in the response content as a UTF-8 bytestring
143143
# to use pandas we need to create a file buffer from the response
144144
fbuf = io.StringIO(response.content.decode('utf-8'))
145-
return read_psm3(fbuf)
145+
return parse_psm3(fbuf)
146146

147147

148-
def read_psm3(filename):
148+
def parse_psm3(fbuf):
149149
"""
150-
Read an NSRDB [1]_ PSM3 weather file (formatted as SAM CSV [2]_).
150+
Parse an NSRDB [1]_ PSM3 weather file (formatted as SAM CSV [2]_).
151151
152152
Parameters
153153
----------
154-
filename: string or file-like object
155-
Filename or file-like object of data to read.
154+
fbuf: file-like object
155+
File-like object containing data to read.
156156
157157
Returns
158158
-------
@@ -215,6 +215,12 @@ def read_psm3(filename):
215215
216216
The second item is a dataframe with the PSM3 timeseries data.
217217
218+
Examples
219+
--------
220+
>>> # Read a local PSM3 file:
221+
>>> with open(filename, 'r') as f: # doctest: +SKIP
222+
... metadata, df = iotools.parse_psm3(f) # doctest: +SKIP
223+
218224
See Also
219225
--------
220226
pvlib.iotools.get_psm3
@@ -226,48 +232,36 @@ def read_psm3(filename):
226232
.. [2] `Standard Time Series Data File Format
227233
<https://rredc.nrel.gov/solar/old_data/nsrdb/2005-2012/wfcsv.pdf>`_
228234
"""
229-
if hasattr(filename, 'readline'):
230-
# if passed a file-like object, not our job to close it
231-
close = False
232-
fbuf = filename
233-
else:
234-
close = True
235-
fbuf = open(filename, 'r')
236-
237-
try:
238-
# The first 2 lines of the response are headers with metadata
239-
header_fields = fbuf.readline().split(',')
240-
header_fields[-1] = header_fields[-1].strip() # strip trailing newline
241-
header_values = fbuf.readline().split(',')
242-
header_values[-1] = header_values[-1].strip() # strip trailing newline
243-
header = dict(zip(header_fields, header_values))
244-
# the response is all strings, so set some header types to numbers
245-
header['Local Time Zone'] = int(header['Local Time Zone'])
246-
header['Time Zone'] = int(header['Time Zone'])
247-
header['Latitude'] = float(header['Latitude'])
248-
header['Longitude'] = float(header['Longitude'])
249-
header['Elevation'] = int(header['Elevation'])
250-
# get the column names so we can set the dtypes
251-
columns = fbuf.readline().split(',')
252-
columns[-1] = columns[-1].strip() # strip trailing newline
253-
# Since the header has so many columns, excel saves blank cols in the
254-
# data below the header lines.
255-
columns = [col for col in columns if col != '']
256-
dtypes = dict.fromkeys(columns, float) # all floats except datevec
257-
dtypes.update(Year=int, Month=int, Day=int, Hour=int, Minute=int)
258-
dtypes['Cloud Type'] = int
259-
dtypes['Fill Flag'] = int
260-
data = pd.read_csv(
261-
fbuf, header=None, names=columns, usecols=columns, dtype=dtypes,
262-
delimiter=',', lineterminator='\n') # skip carriage returns \r
263-
# the response 1st 5 columns are a date vector, convert to datetime
264-
dtidx = pd.to_datetime(
265-
data[['Year', 'Month', 'Day', 'Hour', 'Minute']])
266-
# in USA all timezones are integers
267-
tz = 'Etc/GMT%+d' % -header['Time Zone']
268-
data.index = pd.DatetimeIndex(dtidx).tz_localize(tz)
269-
finally:
270-
if close:
271-
fbuf.close()
235+
# The first 2 lines of the response are headers with metadata
236+
header_fields = fbuf.readline().split(',')
237+
header_fields[-1] = header_fields[-1].strip() # strip trailing newline
238+
header_values = fbuf.readline().split(',')
239+
header_values[-1] = header_values[-1].strip() # strip trailing newline
240+
header = dict(zip(header_fields, header_values))
241+
# the response is all strings, so set some header types to numbers
242+
header['Local Time Zone'] = int(header['Local Time Zone'])
243+
header['Time Zone'] = int(header['Time Zone'])
244+
header['Latitude'] = float(header['Latitude'])
245+
header['Longitude'] = float(header['Longitude'])
246+
header['Elevation'] = int(header['Elevation'])
247+
# get the column names so we can set the dtypes
248+
columns = fbuf.readline().split(',')
249+
columns[-1] = columns[-1].strip() # strip trailing newline
250+
# Since the header has so many columns, excel saves blank cols in the
251+
# data below the header lines.
252+
columns = [col for col in columns if col != '']
253+
dtypes = dict.fromkeys(columns, float) # all floats except datevec
254+
dtypes.update(Year=int, Month=int, Day=int, Hour=int, Minute=int)
255+
dtypes['Cloud Type'] = int
256+
dtypes['Fill Flag'] = int
257+
data = pd.read_csv(
258+
fbuf, header=None, names=columns, usecols=columns, dtype=dtypes,
259+
delimiter=',', lineterminator='\n') # skip carriage returns \r
260+
# the response 1st 5 columns are a date vector, convert to datetime
261+
dtidx = pd.to_datetime(
262+
data[['Year', 'Month', 'Day', 'Hour', 'Minute']])
263+
# in USA all timezones are integers
264+
tz = 'Etc/GMT%+d' % -header['Time Zone']
265+
data.index = pd.DatetimeIndex(dtidx).tz_localize(tz)
272266

273267
return header, data

pvlib/test/test_psm3.py

+8-12
Original file line numberDiff line numberDiff line change
@@ -112,23 +112,19 @@ def test_get_psm3_singleyear():
112112
interval=15)
113113

114114

115-
@pytest.fixture(scope='module',
116-
params=[True, False])
115+
@pytest.fixture
117116
def io_input(request):
118-
"""Switch between filename and file-like object for read_psm3"""
119-
if request.param:
120-
with open(MANUAL_TEST_DATA, 'r') as f:
121-
data = f.read()
122-
obj = StringIO(data)
123-
else:
124-
obj = MANUAL_TEST_DATA
117+
"""file-like object for parse_psm3"""
118+
with open(MANUAL_TEST_DATA, 'r') as f:
119+
data = f.read()
120+
obj = StringIO(data)
125121
return obj
126122

127123

128124
@needs_pandas_0_22
129-
def test_read_psm3(io_input):
130-
"""test read_psm3"""
131-
header, data = psm3.read_psm3(io_input)
125+
def test_parse_psm3(io_input):
126+
"""test parse_psm3"""
127+
header, data = psm3.parse_psm3(io_input)
132128
expected = pd.read_csv(YEAR_TEST_DATA)
133129
# check datevec columns
134130
assert np.allclose(data.Year, expected.Year)

0 commit comments

Comments (0)