2 changes: 1 addition & 1 deletion ci/requirements-py27.yml
@@ -20,4 +20,4 @@ dependencies:
- coveralls
- pytest-mock
- pytest-timeout
- pvfactors==0.1.5
- pvfactors==1.0.1
2 changes: 1 addition & 1 deletion ci/requirements-py35.yml
@@ -20,4 +20,4 @@ dependencies:
- coveralls
- pytest-mock
- pytest-timeout
- pvfactors==0.1.5
- pvfactors==1.0.1
2 changes: 1 addition & 1 deletion ci/requirements-py36.yml
@@ -20,4 +20,4 @@ dependencies:
- coveralls
- pytest-mock
- pytest-timeout
- pvfactors==0.1.5
- pvfactors==1.0.1
2 changes: 1 addition & 1 deletion ci/requirements-py37.yml
@@ -20,4 +20,4 @@ dependencies:
- coveralls
- pytest-mock
- pytest-timeout
- pvfactors==0.1.5
- pvfactors==1.0.1
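All four CI environment files now pin the same pvfactors release. As a quick, hedged sanity check (not part of this PR), the pinned version can be confirmed from Python inside an environment built from one of these files:

import pkg_resources  # provided by setuptools, present in these CI environments
print(pkg_resources.get_distribution('pvfactors').version)  # expected: '1.0.1'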
3 changes: 3 additions & 0 deletions docs/sphinx/source/whatsnew/v0.6.2.rst
@@ -29,6 +29,8 @@ API Changes
* Remove automatic column name mapping from :py:func:`~pvlib.iotools.read_midc`
and :py:func:`~pvlib.iotools.read_midc_raw_data_from_nrel`, and add an
optional keyword argument `variable_map` to map columns.
* Update :py:func:`~pvlib.bifacial.pvfactors_timeseries` and tests to use
``pvfactors`` v1.0.1 (:issue:`699`)

Enhancements
~~~~~~~~~~~~
@@ -81,3 +83,4 @@ Contributors
* :ghuser:`yxh289`
* Jonathan Gaffiot (:ghuser:`jgaffiot`)
* Leland Boeman (:ghuser:`lboeman`)
* Marc Anoma (:ghuser:`anomam`)
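The pvfactors_timeseries changelog entry above is easier to read next to a call sketch. Below is a minimal, hedged example of the updated signature, mirroring the test file later in this diff; the timestamps, solar_zenith, pvrow_height, pvrow_width and albedo values are illustrative placeholders rather than values taken from this PR.

from datetime import datetime
import pandas as pd
from pvlib.bifacial import pvfactors_timeseries

# axis_azimuth is a new required argument with pvfactors v1.0.1, and the
# function now returns two pandas Series (front and back plane-of-array
# irradiance for the pvrow at index 1) instead of the previous three objects.
timestamps = pd.DatetimeIndex([datetime(2019, 6, 1, 11),    # placeholder times
                               datetime(2019, 6, 1, 12)])
ipoa_front, ipoa_back = pvfactors_timeseries(
    solar_azimuth=[110., 140.], solar_zenith=[20., 10.],    # zenith: placeholder
    surface_azimuth=[90., 90.], surface_tilt=[10., 0.],
    axis_azimuth=0.,
    timestamps=timestamps, dni=[1000., 300.], dhi=[50., 500.],
    gcr=0.4, pvrow_height=1.75, pvrow_width=2.44, albedo=0.2,  # placeholders
    run_parallel_calculations=False)
print(ipoa_front)  # irradiance incident on the front of pvrow index 1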
99 changes: 70 additions & 29 deletions pvlib/bifacial.py
@@ -4,15 +4,17 @@
"""

import pandas as pd
import numpy as np


def pvfactors_timeseries(
solar_azimuth, solar_zenith, surface_azimuth, surface_tilt,
axis_azimuth,
timestamps, dni, dhi, gcr, pvrow_height, pvrow_width, albedo,
n_pvrows=3, index_observed_pvrow=1,
rho_front_pvrow=0.03, rho_back_pvrow=0.05,
horizon_band_angle=15.,
run_parallel_calculations=True, n_workers_for_parallel_calcs=None):
run_parallel_calculations=True, n_workers_for_parallel_calcs=2):
"""
Calculate front and back surface plane-of-array irradiance on
a fixed tilt or single-axis tracker PV array configuration, and using
@@ -31,6 +33,9 @@ def pvfactors_timeseries(
convention (deg)
surface_tilt: numeric
Tilt angle of the PV modules, going from 0 to 180 (deg)
axis_azimuth: float
Azimuth angle of the rotation axis of the PV modules, using pvlib's
convention (deg). This value is assumed to be constant for all timestamps.
timestamps: datetime or DatetimeIndex
List of simulation timestamps
dni: numeric
@@ -59,9 +64,9 @@
run_parallel_calculations: bool, default True
pvfactors is capable of using multiprocessing. Use this flag to decide
whether to run calculations in parallel (recommended) or not.
n_workers_for_parallel_calcs: int, default None
n_workers_for_parallel_calcs: int, default 2
Number of workers to use in the case of parallel calculations. The
default value of 'None' will lead to using a value equal to the number
'-1' value will lead to using a number of workers equal to the number
of CPUs on the machine running the model.

Returns
@@ -86,63 +91,99 @@
Photovoltaic Specialist Conference. 2017.
"""

# Convert pandas Series inputs to numpy arrays
# Convert pandas Series inputs (and some lists) to numpy arrays
if isinstance(solar_azimuth, pd.Series):
solar_azimuth = solar_azimuth.values
elif isinstance(solar_azimuth, list):
solar_azimuth = np.array(solar_azimuth)
if isinstance(solar_zenith, pd.Series):
solar_zenith = solar_zenith.values
if isinstance(surface_azimuth, pd.Series):
surface_azimuth = surface_azimuth.values
elif isinstance(surface_azimuth, list):
surface_azimuth = np.array(surface_azimuth)
if isinstance(surface_tilt, pd.Series):
surface_tilt = surface_tilt.values
if isinstance(dni, pd.Series):
dni = dni.values
if isinstance(dhi, pd.Series):
dhi = dhi.values

# Import pvfactors functions for timeseries calculations.
from pvfactors.timeseries import (calculate_radiosities_parallel_perez,
calculate_radiosities_serially_perez,
get_average_pvrow_outputs)
idx_slice = pd.IndexSlice
from pvfactors.run import (run_timeseries_engine,
run_parallel_engine)

# Build up pv array configuration parameters
pvarray_parameters = {
'n_pvrows': n_pvrows,
'axis_azimuth': axis_azimuth,
'pvrow_height': pvrow_height,
'pvrow_width': pvrow_width,
'gcr': gcr,
'rho_ground': albedo,
'rho_front_pvrow': rho_front_pvrow,
'rho_back_pvrow': rho_back_pvrow,
'horizon_band_angle': horizon_band_angle
}

# Run pvfactors calculations: either in parallel or serially
if run_parallel_calculations:
df_registries, df_custom_perez = calculate_radiosities_parallel_perez(
pvarray_parameters, timestamps, solar_zenith, solar_azimuth,
surface_tilt, surface_azimuth, dni, dhi,
n_processes=n_workers_for_parallel_calcs)
report = run_parallel_engine(
PVFactorsReportBuilder, pvarray_parameters,
timestamps, dni, dhi,
solar_zenith, solar_azimuth,
surface_tilt, surface_azimuth,
albedo, n_processes=n_workers_for_parallel_calcs)
else:
inputs = (pvarray_parameters, timestamps, solar_zenith, solar_azimuth,
surface_tilt, surface_azimuth, dni, dhi)
df_registries, df_custom_perez = calculate_radiosities_serially_perez(
inputs)
report = run_timeseries_engine(
PVFactorsReportBuilder.build, pvarray_parameters,
timestamps, dni, dhi,
solar_zenith, solar_azimuth,
surface_tilt, surface_azimuth,
albedo)

# Get the average surface outputs
df_outputs = get_average_pvrow_outputs(df_registries,
values=['qinc'],
include_shading=True)
# Turn report into dataframe
df_report = pd.DataFrame(report, index=timestamps)

# Select the calculated outputs from the pvrow to observe
ipoa_front = df_outputs.loc[:, idx_slice[index_observed_pvrow,
'front', 'qinc']]
return df_report.total_inc_front, df_report.total_inc_back

ipoa_back = df_outputs.loc[:, idx_slice[index_observed_pvrow,
'back', 'qinc']]

# Set timestamps as index of df_registries for consistency of outputs
df_registries = df_registries.set_index('timestamps')
class PVFactorsReportBuilder(object):
"""In pvfactors, a class is required to build reports when running
calculations with multiprocessing because of Python's pickling constraints."""

return ipoa_front, ipoa_back, df_registries
@staticmethod
def build(report, pvarray):
"""Reports will have total incident irradiance on front and
back surface of center pvrow (index=1)"""
# Initialize the report as a dictionary
if report is None:
list_keys = ['total_inc_back', 'total_inc_front']
report = {key: [] for key in list_keys}
# Add elements to the report
if pvarray is not None:
pvrow = pvarray.pvrows[1] # use center pvrow
report['total_inc_back'].append(
pvrow.back.get_param_weighted('qinc'))
report['total_inc_front'].append(
pvrow.front.get_param_weighted('qinc'))
else:
# No calculation is performed when the sun is down
report['total_inc_back'].append(np.nan)
report['total_inc_front'].append(np.nan)

return report

@staticmethod
def merge(reports):
"""Works for dictionary reports"""
report = reports[0]
# Merge only if more than 1 report
if len(reports) > 1:
keys_report = list(reports[0].keys())
for other_report in reports[1:]:
if other_report is not None:
for key in keys_report:
report[key] += other_report[key]
return report
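Since the class above exists mainly to satisfy the multiprocessing engine, its build/merge contract is easy to miss. Below is a small, hedged standalone sketch of that contract, exercising only the pvarray=None branch so nothing depends on pvfactors objects:

import math
from pvlib.bifacial import PVFactorsReportBuilder

# build() accumulates one entry per timestamp; pvarray=None (sun below the
# horizon in the real run) appends NaN to both lists.
report = None
for _ in range(2):
    report = PVFactorsReportBuilder.build(report, None)

# merge() concatenates the per-process reports produced by a parallel run.
other = PVFactorsReportBuilder.build(None, None)
merged = PVFactorsReportBuilder.merge([report, other])

assert len(merged['total_inc_front']) == 3
assert all(math.isnan(v) for v in merged['total_inc_back'])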
71 changes: 27 additions & 44 deletions pvlib/test/test_bifacial.py
@@ -2,10 +2,13 @@
from datetime import datetime
from pvlib.bifacial import pvfactors_timeseries
from conftest import requires_pvfactors
import pytest


@requires_pvfactors
def test_pvfactors_timeseries():
@pytest.mark.parametrize('run_parallel_calculations',
[False, True])
def test_pvfactors_timeseries(run_parallel_calculations):
""" Test that pvfactors is functional, using the TLDR section inputs of the
package's GitHub repo README.md file:
https://github.com/SunPower/pvfactors/blob/master/README.md#tldr---quick-start"""
@@ -18,6 +21,7 @@ def test_pvfactors_timeseries():
solar_azimuth = [110., 140.]
surface_tilt = [10., 0.]
surface_azimuth = [90., 90.]
axis_azimuth = 0.
dni = [1000., 300.]
dhi = [50., 500.]
gcr = 0.4
@@ -31,42 +35,32 @@
horizon_band_angle = 15.

# Expected values
expected_ipoa_front = pd.Series([1034.96216923, 795.4423259],
expected_ipoa_front = pd.Series([1034.95474708997, 795.4423259036623],
index=timestamps,
name=(1, 'front', 'qinc'))
expected_ipoa_back = pd.Series([92.11871485, 70.39404124],
name=('total_inc_front'))
expected_ipoa_back = pd.Series([91.88707460262768, 78.05831585685215],
index=timestamps,
name=(1, 'back', 'qinc'))
name=('total_inc_back'))

# Test serial calculations
ipoa_front, ipoa_back, df_registries = pvfactors_timeseries(
# Run calculation
ipoa_front, ipoa_back = pvfactors_timeseries(
solar_azimuth, solar_zenith, surface_azimuth, surface_tilt,
axis_azimuth,
timestamps, dni, dhi, gcr, pvrow_height, pvrow_width, albedo,
n_pvrows=n_pvrows, index_observed_pvrow=index_observed_pvrow,
rho_front_pvrow=rho_front_pvrow, rho_back_pvrow=rho_back_pvrow,
horizon_band_angle=horizon_band_angle,
run_parallel_calculations=False, n_workers_for_parallel_calcs=None)
run_parallel_calculations=run_parallel_calculations,
n_workers_for_parallel_calcs=-1)

pd.testing.assert_series_equal(ipoa_front, expected_ipoa_front)
pd.testing.assert_series_equal(ipoa_back, expected_ipoa_back)
pd.testing.assert_index_equal(timestamps, df_registries.index.unique())

# Run calculations in parallel
ipoa_front, ipoa_back, df_registries = pvfactors_timeseries(
solar_azimuth, solar_zenith, surface_azimuth, surface_tilt,
timestamps, dni, dhi, gcr, pvrow_height, pvrow_width, albedo,
n_pvrows=n_pvrows, index_observed_pvrow=index_observed_pvrow,
rho_front_pvrow=rho_front_pvrow, rho_back_pvrow=rho_back_pvrow,
horizon_band_angle=horizon_band_angle,
run_parallel_calculations=True, n_workers_for_parallel_calcs=None)

pd.testing.assert_series_equal(ipoa_front, expected_ipoa_front)
pd.testing.assert_series_equal(ipoa_back, expected_ipoa_back)
pd.testing.assert_index_equal(timestamps, df_registries.index.unique())


@requires_pvfactors
def test_pvfactors_timeseries_pandas_inputs():
@pytest.mark.parametrize('run_parallel_calculations',
[False, True])
def test_pvfactors_timeseries_pandas_inputs(run_parallel_calculations):
""" Test that pvfactors is functional, using the TLDR section inputs of the
package's GitHub repo README.md file, but converted to pandas Series:
https://github.com/SunPower/pvfactors/blob/master/README.md#tldr---quick-start"""
@@ -79,6 +73,7 @@ def test_pvfactors_timeseries_pandas_inputs():
solar_azimuth = pd.Series([110., 140.])
surface_tilt = pd.Series([10., 0.])
surface_azimuth = pd.Series([90., 90.])
axis_azimuth = 0.
dni = pd.Series([1000., 300.])
dhi = pd.Series([50., 500.])
gcr = 0.4
@@ -92,35 +87,23 @@
horizon_band_angle = 15.

# Expected values
expected_ipoa_front = pd.Series([1034.96216923, 795.4423259],
expected_ipoa_front = pd.Series([1034.95474708997, 795.4423259036623],
index=timestamps,
name=(1, 'front', 'qinc'))
expected_ipoa_back = pd.Series([92.11871485, 70.39404124],
name=('total_inc_front'))
expected_ipoa_back = pd.Series([91.88707460262768, 78.05831585685215],
index=timestamps,
name=(1, 'back', 'qinc'))

# Test serial calculations
ipoa_front, ipoa_back, df_registries = pvfactors_timeseries(
solar_azimuth, solar_zenith, surface_azimuth, surface_tilt,
timestamps, dni, dhi, gcr, pvrow_height, pvrow_width, albedo,
n_pvrows=n_pvrows, index_observed_pvrow=index_observed_pvrow,
rho_front_pvrow=rho_front_pvrow, rho_back_pvrow=rho_back_pvrow,
horizon_band_angle=horizon_band_angle,
run_parallel_calculations=False, n_workers_for_parallel_calcs=None)

pd.testing.assert_series_equal(ipoa_front, expected_ipoa_front)
pd.testing.assert_series_equal(ipoa_back, expected_ipoa_back)
pd.testing.assert_index_equal(timestamps, df_registries.index.unique())
name=('total_inc_back'))

# Run calculations in parallel
ipoa_front, ipoa_back, df_registries = pvfactors_timeseries(
# Run calculation
ipoa_front, ipoa_back = pvfactors_timeseries(
solar_azimuth, solar_zenith, surface_azimuth, surface_tilt,
axis_azimuth,
timestamps, dni, dhi, gcr, pvrow_height, pvrow_width, albedo,
n_pvrows=n_pvrows, index_observed_pvrow=index_observed_pvrow,
rho_front_pvrow=rho_front_pvrow, rho_back_pvrow=rho_back_pvrow,
horizon_band_angle=horizon_band_angle,
run_parallel_calculations=True, n_workers_for_parallel_calcs=None)
run_parallel_calculations=run_parallel_calculations,
n_workers_for_parallel_calcs=-1)

pd.testing.assert_series_equal(ipoa_front, expected_ipoa_front)
pd.testing.assert_series_equal(ipoa_back, expected_ipoa_back)
pd.testing.assert_index_equal(timestamps, df_registries.index.unique())
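Both parametrized cases (serial and parallel) can be exercised locally with a plain pytest run; a hedged sketch, assuming pytest and pvfactors==1.0.1 are installed as pinned in the CI files above:

import pytest
# -k is optional; it narrows the run to the two bifacial tests shown above,
# each executed for run_parallel_calculations in {False, True}.
raise SystemExit(pytest.main(["-v", "-k", "pvfactors_timeseries",
                              "pvlib/test/test_bifacial.py"]))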