From a5888e4755d9f0cb8d729460dd76098e931c89a5 Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sat, 28 Feb 2015 11:58:55 -0700 Subject: [PATCH 1/8] add ability to open tmy3 files via a url. required more flexible renaming --- pvlib/tmy.py | 117 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 72 insertions(+), 45 deletions(-) diff --git a/pvlib/tmy.py b/pvlib/tmy.py index 22bfda5e54..85f4a43784 100644 --- a/pvlib/tmy.py +++ b/pvlib/tmy.py @@ -5,11 +5,14 @@ import logging pvl_logger = logging.getLogger('pvlib') -import pdb import re import datetime import dateutil -import csv +import io +try: + from urllib2 import urlopen +except ImportError: + from urllib.request import urlopen import pandas as pd import numpy as np @@ -18,53 +21,57 @@ -def readtmy3(filename=None): +def readtmy3(filename=None, coerce_year=None, recolumn=True): ''' - Read a TMY3 file in to a pandas dataframe + Read a TMY3 file in to a pandas dataframe. Read a TMY3 file and make a pandas dataframe of the data. Note that values - contained in the struct are unchanged from the TMY3 file (i.e. units + contained in the metadata dictionary are unchanged from the TMY3 file (i.e. units are retained). In the case of any discrepencies between this documentation and the TMY3 User's Manual ([1]), the TMY3 User's Manual takes precedence. - If a filename is not provided, the user will be prompted to browse to - an appropriate TMY3 file. - Parameters ---------- - filename : string - An optional argument which allows the user to select which - TMY3 format file should be read. A file path may also be necessary if - the desired TMY3 file is not in the MATLAB working path. + filename : None or string + If None, attempts to use an interactive file browser. + A string can be a relative file path, absolute file path, + or url. + + coerce_year : None or int + If supplied, the year of the data will be coerced to this input. + + recolumn : bool + If True, apply standard names to TMY3 columns. + Typically this resulsts in stripping the units from the column name. + Returns ------- - TMYDATA : DataFrame + data : DataFrame - A pandas dataframe, is provided with the components in the table below. Note - that for more detailed descriptions of each component, please consult - the TMY3 User's Manual ([1]), especially tables 1-1 through 1-6. + A pandas dataframe with the columns described in the table below. + For more detailed descriptions of each component, please consult + the TMY3 User's Manual ([1]), especially tables 1-1 through 1-6. - meta : struct - struct of meta data is created, which contains all - site metadata available in the file + metadata : dict + The site metadata available in the file. 
Notes ----- =============== ====== =================== - meta field format description + key format description =============== ====== =================== - meta.altitude Float site elevation - meta.latitude Float site latitudeitude - meta.longitude Float site longitudeitude - meta.Name String site name - meta.State String state - meta.TZ Float timezone - meta.USAF Int USAF identifier + altitude Float site elevation + latitude Float site latitudeitude + longitude Float site longitudeitude + Name String site name + State String state + TZ Float UTC offset + USAF Int USAF identifier =============== ====== =================== ============================= ====================================================================================================================================================== @@ -147,24 +154,26 @@ def readtmy3(filename=None): [2] Wilcox, S. (2007). National Solar Radiation Database 1991 2005 Update: Users Manual. 472 pp.; NREL Report No. TP-581-41364. - - See also - --------- - - pvl_makelocationstruct - pvl_readtmy2 - ''' - if filename is None: #If no filename is input + if filename is None: try: filename = interactive_load() except: raise Exception('Interactive load failed. Tkinter not supported on this system. Try installing X-Quartz and reloading') - head = ['USAF','Name','State','TZ','latitude','longitude','altitude'] - headerfile = open(filename,'r') - meta = dict(zip(head,headerfile.readline().rstrip('\n').split(","))) #Read in file metadata + head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude'] + + try: + csvdata = open(filename, 'r') + except IOError: + response = urlopen(filename) + csvdata = io.StringIO(response.read().decode(errors='ignore')) + + # read in file metadata + meta = dict(zip(head, csvdata.readline().rstrip('\n').split(","))) + + # convert metadata strings to numeric types meta['altitude'] = float(meta['altitude']) meta['latitude'] = float(meta['latitude']) meta['longitude'] = float(meta['longitude']) @@ -174,8 +183,9 @@ def readtmy3(filename=None): TMYData = pd.read_csv(filename, header=1, parse_dates={'datetime':['Date (MM/DD/YYYY)','Time (HH:MM)']}, date_parser=parsedate, index_col='datetime') - - TMYData = recolumn(TMYData) #rename to standard column names + + if recolumn: + _recolumn(TMYData) #rename to standard column names TMYData = TMYData.tz_localize(int(meta['TZ']*3600)) @@ -214,8 +224,23 @@ def parsetz(UTC): -def recolumn(TMY3): - TMY3.columns = ('ETR','ETRN','GHI','GHISource','GHIUncertainty', +def _recolumn(tmy3_dataframe, inplace=True): + """ + Rename the columns of the TMY3 DataFrame. + + Parameters + ---------- + tmy3_dataframe : DataFrame + inplace : bool + passed to DataFrame.rename() + + Returns + ------- + Recolumned DataFrame. 
+ """ + raw_columns = 'ETR (W/m^2),ETRN (W/m^2),GHI (W/m^2),GHI source,GHI uncert (%),DNI (W/m^2),DNI source,DNI uncert (%),DHI (W/m^2),DHI source,DHI uncert (%),GH illum (lx),GH illum source,Global illum uncert (%),DN illum (lx),DN illum source,DN illum uncert (%),DH illum (lx),DH illum source,DH illum uncert (%),Zenith lum (cd/m^2),Zenith lum source,Zenith lum uncert (%),TotCld (tenths),TotCld source,TotCld uncert (code),OpqCld (tenths),OpqCld source,OpqCld uncert (code),Dry-bulb (C),Dry-bulb source,Dry-bulb uncert (code),Dew-point (C),Dew-point source,Dew-point uncert (code),RHum (%),RHum source,RHum uncert (code),Pressure (mbar),Pressure source,Pressure uncert (code),Wdir (degrees),Wdir source,Wdir uncert (code),Wspd (m/s),Wspd source,Wspd uncert (code),Hvis (m),Hvis source,Hvis uncert (code),CeilHgt (m),CeilHgt source,CeilHgt uncert (code),Pwat (cm),Pwat source,Pwat uncert (code),AOD (unitless),AOD source,AOD uncert (code),Alb (unitless),Alb source,Alb uncert (code),Lprecip depth (mm),Lprecip quantity (hr),Lprecip source,Lprecip uncert (code),PresWth (METAR code),PresWth source,PresWth uncert (code)' + + new_columns = ['ETR','ETRN','GHI','GHISource','GHIUncertainty', 'DNI','DNISource','DNIUncertainty','DHI','DHISource','DHIUncertainty', 'GHillum','GHillumSource','GHillumUncertainty','DNillum','DNillumSource', 'DNillumUncertainty','DHillum','DHillumSource','DHillumUncertainty', @@ -228,9 +253,11 @@ def recolumn(TMY3): 'CeilHgt','CeilHgtSource','CeilHgtUncertainty','Pwat','PwatSource', 'PwatUncertainty','AOD','AODSource','AODUncertainty','Alb','AlbSource', 'AlbUncertainty','Lprecipdepth','Lprecipquantity','LprecipSource', - 'LprecipUncertainty') - - return TMY3 + 'LprecipUncertainty','PresWth','PresWth source','PresWth uncert'] + + mapping = dict(zip(raw_columns.split(','), new_columns)) + + return tmy3_dataframe.rename(columns=mapping, inplace=True) From c5261f9b39bd331da730745b2324a9eedd2b7754 Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sat, 28 Feb 2015 12:19:15 -0700 Subject: [PATCH 2/8] add _ to helper function names --- pvlib/tmy.py | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/pvlib/tmy.py b/pvlib/tmy.py index 85f4a43784..effe7cb7ac 100644 --- a/pvlib/tmy.py +++ b/pvlib/tmy.py @@ -158,7 +158,7 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): if filename is None: try: - filename = interactive_load() + filename = _interactive_load() except: raise Exception('Interactive load failed. Tkinter not supported on this system. Try installing X-Quartz and reloading') @@ -182,7 +182,7 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): TMYData = pd.read_csv(filename, header=1, parse_dates={'datetime':['Date (MM/DD/YYYY)','Time (HH:MM)']}, - date_parser=parsedate, index_col='datetime') + date_parser=_parsedate, index_col='datetime') if recolumn: _recolumn(TMYData) #rename to standard column names @@ -193,7 +193,7 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): -def interactive_load(): +def _interactive_load(): import Tkinter from tkFileDialog import askopenfilename Tkinter.Tk().withdraw() #Start interactive file input @@ -201,7 +201,7 @@ def interactive_load(): -def parsedate(ymd, hour): +def _parsedate(ymd, hour): # stupidly complicated due to TMY3's usage of hour 24 # and dateutil's inability to handle that. 
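    # For example, TMY3 files label the final record of each day '24:00',
    # which Python's datetime/dateutil cannot represent. The workaround:
    # parse hour - 1 (so '12/31/1997' + '24:00' is read as 23:00), then
    # shift the result forward one hour, giving 1998-01-01 00:00.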
offset_hour = int(hour[:2]) - 1 @@ -212,18 +212,6 @@ def parsedate(ymd, hour): -def parsetz(UTC): - #currently not used, need to make these daylight savings unaware - TZinfo = {-5:'EST', - -6:'CST', - -7:'MST', - -8:'PST', - -9:'AKST', - -10:'HAST'} - return TZinfo[UTC] - - - def _recolumn(tmy3_dataframe, inplace=True): """ Rename the columns of the TMY3 DataFrame. From 63aba8063c5986de3bf9b270522e8b92ae5f178d Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sat, 28 Feb 2015 12:26:52 -0700 Subject: [PATCH 3/8] more minor clean up --- pvlib/tmy.py | 30 +++++++++++------------------- 1 file changed, 11 insertions(+), 19 deletions(-) diff --git a/pvlib/tmy.py b/pvlib/tmy.py index effe7cb7ac..0045d9270c 100644 --- a/pvlib/tmy.py +++ b/pvlib/tmy.py @@ -394,43 +394,35 @@ def readtmy2(filename): [1] Marion, W and Urban, K. "Wilcox, S and Marion, W. "User's Manual for TMY2s". NREL 1995. - - See also - -------- - - pvl_makelocationstruct - pvl_maketimestruct - pvl_readtmy3 - ''' if filename is None: #If no filename is input try: - filename = interactive_load() + filename = _interactive_load() except: raise Exception('Interactive load failed. Tkinter not supported on this system. Try installing X-Quartz and reloading') - string='%2d%2d%2d%2d%4d%4d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%2d%1s%1d%2d%1s%1d%4d%1s%1d%4d%1s%1d%3d%1s%1d%4d%1s%1d%3d%1s%1d%3d%1s%1d%4d%1s%1d%5d%1s%1d%10d%3d%1s%1d%3d%1s%1d%3d%1s%1d%2d%1s%1d' - columns='year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUnertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint' - hdr_columns='WBAN,City,State,TZ,latitude,longitude,altitude' + string = '%2d%2d%2d%2d%4d%4d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%2d%1s%1d%2d%1s%1d%4d%1s%1d%4d%1s%1d%3d%1s%1d%4d%1s%1d%3d%1s%1d%3d%1s%1d%4d%1s%1d%5d%1s%1d%10d%3d%1s%1d%3d%1s%1d%3d%1s%1d%2d%1s%1d' + columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUnertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint' + hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude' - TMY2, TMY2_meta = readTMY(string, columns, 
hdr_columns, filename) + TMY2, TMY2_meta = _readTMY2(string, columns, hdr_columns, filename) return TMY2, TMY2_meta -def parsemeta(columns,line): +def _parsemeta_tmy2(columns, line): """Retrieves metadata from the top line of the tmy2 file. Parameters ---------- - Columns : string - String of column headings in the header + columns : string + String of column headings in the header line : string - Header string containing DataFrame + Header string containing DataFrame Returns ------- @@ -454,7 +446,7 @@ def parsemeta(columns,line): -def readTMY(string, columns, hdr_columns, fname): +def _readTMY2(string, columns, hdr_columns, fname): head=1 date=[] with open(fname) as infile: @@ -462,7 +454,7 @@ def readTMY(string, columns, hdr_columns, fname): for line in infile: #Skip the header if head!=0: - meta=parsemeta(hdr_columns,line) + meta = _parsemeta_tmy2(hdr_columns,line) head-=1 continue #Reset the cursor and array for each line From 7c18634a8b70e66b4f8f73e7b7f731393f6c2b0b Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sat, 28 Feb 2015 12:27:13 -0700 Subject: [PATCH 4/8] add more tmy tests --- pvlib/test/test_tmy.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/pvlib/test/test_tmy.py b/pvlib/test/test_tmy.py index 1fe8d78215..87eba3457f 100644 --- a/pvlib/test/test_tmy.py +++ b/pvlib/test/test_tmy.py @@ -5,13 +5,27 @@ import os test_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) +tmy3_testfile = os.path.join(test_dir, '../data/703165TY.csv') +tmy2_testfile = os.path.join(test_dir, '../data/12839.tm2') from pvlib import tmy def test_readtmy3(): - tmy.readtmy3(os.path.join(test_dir, '../data/703165TY.csv')) + tmy.readtmy3(tmy3_testfile) + +def test_readtmy3_remote(): + url = 'http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/data/tmy3/703165TYA.CSV' + tmy.readtmy3(url) + +def test_readtmy3_recolumn(): + data, meta = tmy.readtmy3(tmy3_testfile) + assert 'GHISource' in data.columns + +def test_readtmy3_norecolumn(): + data, meta = tmy.readtmy3(tmy3_testfile, recolumn=False) + assert 'GHI source' in data.columns def test_readtmy2(): - tmy.readtmy2(os.path.join(test_dir, '../data/12839.tm2')) + tmy.readtmy2(tmy2_testfile) From f2d92b1e3e3f06787988fc83f63b0c74a07cb5f5 Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sat, 28 Feb 2015 12:56:46 -0700 Subject: [PATCH 5/8] add support for coerce_year --- pvlib/test/test_tmy.py | 10 ++++++++++ pvlib/tmy.py | 7 +++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/pvlib/test/test_tmy.py b/pvlib/test/test_tmy.py index 87eba3457f..9234556096 100644 --- a/pvlib/test/test_tmy.py +++ b/pvlib/test/test_tmy.py @@ -26,6 +26,16 @@ def test_readtmy3_norecolumn(): data, meta = tmy.readtmy3(tmy3_testfile, recolumn=False) assert 'GHI source' in data.columns +def test_readtmy3_coerce_year(): + coerce_year = 1987 + data, meta = tmy.readtmy3(tmy3_testfile, coerce_year=coerce_year) + assert (data.index.year == 1987).all() + +def test_readtmy3_no_coerce_year(): + coerce_year = None + data, meta = tmy.readtmy3(tmy3_testfile, coerce_year=coerce_year) + assert 1997 and 1999 in data.index.year + def test_readtmy2(): tmy.readtmy2(tmy2_testfile) diff --git a/pvlib/tmy.py b/pvlib/tmy.py index 0045d9270c..03776e3c53 100644 --- a/pvlib/tmy.py +++ b/pvlib/tmy.py @@ -182,7 +182,8 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): TMYData = pd.read_csv(filename, header=1, parse_dates={'datetime':['Date (MM/DD/YYYY)','Time (HH:MM)']}, - date_parser=_parsedate, 
index_col='datetime') + date_parser=lambda *x: _parsedate(*x, year=coerce_year), + index_col='datetime') if recolumn: _recolumn(TMYData) #rename to standard column names @@ -201,13 +202,15 @@ def _interactive_load(): -def _parsedate(ymd, hour): +def _parsedate(ymd, hour, year=None): # stupidly complicated due to TMY3's usage of hour 24 # and dateutil's inability to handle that. offset_hour = int(hour[:2]) - 1 offset_datetime = '{} {}:00'.format(ymd, offset_hour) offset_date = dateutil.parser.parse(offset_datetime) true_date = offset_date + dateutil.relativedelta.relativedelta(hours=1) + if year is not None: + true_date = true_date.replace(year=year) return true_date From f3bebbe9f6e14e4cf2f1d75fff791642707531cb Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Sun, 1 Mar 2015 20:05:06 -0700 Subject: [PATCH 6/8] add network decorator --- pvlib/test/test_tmy.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pvlib/test/test_tmy.py b/pvlib/test/test_tmy.py index 9234556096..e7176dea01 100644 --- a/pvlib/test/test_tmy.py +++ b/pvlib/test/test_tmy.py @@ -4,6 +4,8 @@ import inspect import os +from pandas.util.testing import network + test_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) tmy3_testfile = os.path.join(test_dir, '../data/703165TY.csv') tmy2_testfile = os.path.join(test_dir, '../data/12839.tm2') @@ -13,7 +15,8 @@ def test_readtmy3(): tmy.readtmy3(tmy3_testfile) - + +@network(raise_on_error=True) def test_readtmy3_remote(): url = 'http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/data/tmy3/703165TYA.CSV' tmy.readtmy3(url) From e336b3e1d51044f9d8015241a9b6f6feec5497ab Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Mon, 9 Mar 2015 17:16:27 -0700 Subject: [PATCH 7/8] doc clean up --- pvlib/tmy.py | 99 ++++++++++++++++++++++------------------------------ 1 file changed, 42 insertions(+), 57 deletions(-) diff --git a/pvlib/tmy.py b/pvlib/tmy.py index 03776e3c53..be70caa633 100644 --- a/pvlib/tmy.py +++ b/pvlib/tmy.py @@ -1,5 +1,5 @@ """ -Import TMY2 and TMY3 data. +Import functions for TMY2 and TMY3 data files. """ import logging @@ -25,33 +25,31 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): ''' Read a TMY3 file in to a pandas dataframe. - Read a TMY3 file and make a pandas dataframe of the data. Note that values - contained in the metadata dictionary are unchanged from the TMY3 file (i.e. units + Note that values contained in the metadata dictionary are + unchanged from the TMY3 file (i.e. units are retained). In the case of any discrepencies between this - documentation and the TMY3 User's Manual ([1]), the TMY3 User's Manual + documentation and the TMY3 User's Manual [1], the TMY3 User's Manual takes precedence. Parameters ---------- - filename : None or string - If None, attempts to use an interactive file browser. + If None, attempts to use a Tkinter file browser. A string can be a relative file path, absolute file path, or url. coerce_year : None or int - If supplied, the year of the data will be coerced to this input. + If supplied, the year of the data will be set to this value. recolumn : bool If True, apply standard names to TMY3 columns. - Typically this resulsts in stripping the units from the column name. + Typically this results in stripping the units from the column name. - Returns ------- - + Tuple of the form (data, metadata). + data : DataFrame - A pandas dataframe with the columns described in the table below. 
For more detailed descriptions of each component, please consult the TMY3 User's Manual ([1]), especially tables 1-1 through 1-6. @@ -62,6 +60,8 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True): Notes ----- + The returned structures have the following fields. + =============== ====== =================== key format description =============== ====== =================== @@ -251,73 +251,58 @@ def _recolumn(tmy3_dataframe, inplace=True): return tmy3_dataframe.rename(columns=mapping, inplace=True) - -######################### -# -# TMY2 below -# -######################### - - def readtmy2(filename): ''' - Read a TMY2 file in to a DataFrame + Read a TMY2 file in to a DataFrame. - Note that valuescontained in the DataFrame are unchanged from the TMY2 - file (i.e. units are retained). Time/Date and Location data imported from the + Note that values contained in the DataFrame are unchanged from the TMY2 + file (i.e. units are retained). Time/Date and location data imported from the TMY2 file have been modified to a "friendlier" form conforming to modern conventions (e.g. N latitude is postive, E longitude is positive, the "24th" hour of any day is technically the "0th" hour of the next day). In the case of any discrepencies between this documentation and the - TMY2 User's Manual ([1]), the TMY2 User's Manual takes precedence. - - If a filename is not provided, the user will be prompted to browse to - an appropriate TMY2 file. + TMY2 User's Manual [1], the TMY2 User's Manual takes precedence. Parameters ---------- - filename : string - - an optional argument which allows the user to select which - TMY2 format file should be read. A file path may also be necessary if - the desired TMY2 file is not in the working path. If filename - is not provided, the user will be prompted to browse to an - appropriate TMY2 file. + filename : None or string + If None, attempts to use a Tkinter file browser. + A string can be a relative file path, absolute file path, + or url. Returns ------- + Tuple of the form (data, metadata). + + data : DataFrame + A dataframe with the columns described in the table below. + For a more detailed descriptions of each component, please consult + the TMY2 User's Manual ([1]), especially tables 3-1 through 3-6, and + Appendix B. - TMYData : DataFrame - - A dataframe, is provided with the following components. Note - that for more detailed descriptions of each component, please consult - the TMY2 User's Manual ([1]), especially tables 3-1 through 3-6, and - Appendix B. - - meta : struct - - A struct containing the metadata from the TMY2 file. + metadata : dict + The site metadata available in the file. Notes ----- - The structures have the following fields + The returned structures have the following fields. 
- ============================ ============================================================ - meta Field - ============================ ============================================================ - meta.SiteID Site identifier code (WBAN number), scalar unsigned integer - meta.StationName Station name, 1x1 cell string - meta.StationState Station state 2 letter designator, 1x1 cell string - meta.SiteTimeZone Hours from Greenwich, scalar double - meta.latitude Latitude in decimal degrees, scalar double - meta.longitude Longitude in decimal degrees, scalar double - meta.SiteElevation Site elevation in meters, scalar double - ============================ ============================================================ + ============= ================================== + key description + ============= ================================== + SiteID Site identifier code (WBAN number) + StationName Station name + StationState Station state 2 letter designator + SiteTimeZone Hours from Greenwich + latitude Latitude in decimal degrees + longitude Longitude in decimal degrees + SiteElevation Site elevation in meters + ============= ================================== ============================ ========================================================================================================================================================================== - TMYData Field Meaning + TMYData field description ============================ ========================================================================================================================================================================== index Pandas timeseries object containing timestamps year @@ -399,7 +384,7 @@ def readtmy2(filename): for TMY2s". NREL 1995. ''' - if filename is None: #If no filename is input + if filename is None: try: filename = _interactive_load() except: From 8d6c6b2587277eac23b3bb959ce06e038689c948 Mon Sep 17 00:00:00 2001 From: Will Holmgren Date: Mon, 9 Mar 2015 17:33:01 -0700 Subject: [PATCH 8/8] fix my careless merge --- pvlib/test/test_tmy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/test/test_tmy.py b/pvlib/test/test_tmy.py index e7176dea01..de7b2d2e75 100644 --- a/pvlib/test/test_tmy.py +++ b/pvlib/test/test_tmy.py @@ -16,7 +16,7 @@ def test_readtmy3(): tmy.readtmy3(tmy3_testfile) -@network(raise_on_error=True) +@network def test_readtmy3_remote(): url = 'http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/data/tmy3/703165TYA.CSV' tmy.readtmy3(url)
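Illustrative usage (not part of the patches above): a minimal sketch of the readtmy3 behavior added in this series, namely URL input, coerce_year, and recolumn. The CSV path and the NREL URL are simply the fixtures already used in pvlib/test/test_tmy.py; the path construction assumes the data file ships inside the pvlib package, so adjust it for your checkout.

import os
from pvlib import tmy

# Assumed location of the test CSV (pvlib/data/703165TY.csv in this repo).
pvlib_dir = os.path.dirname(tmy.__file__)
tmy3_file = os.path.join(pvlib_dir, 'data', '703165TY.csv')

# Local file with the default renaming (recolumn=True): raw TMY3 headers
# such as 'GHI (W/m^2)' and 'GHI source' become 'GHI' and 'GHISource'.
data, meta = tmy.readtmy3(tmy3_file)
print(meta['Name'], meta['latitude'], meta['longitude'])
print(data['GHI'].head())

# Remote file: readtmy3 falls back to urlopen() when open() raises
# IOError, so a URL is accepted wherever a file path is.
url = ('http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/'
       'data/tmy3/703165TYA.CSV')
data, meta = tmy.readtmy3(url)

# Keep the raw column names and force every timestamp into one year
# (TMY3 records are drawn from many different calendar years).
data, meta = tmy.readtmy3(tmy3_file, coerce_year=2015, recolumn=False)
assert (data.index.year == 2015).all()
print(data['GHI (W/m^2)'].head())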