From d7deb80cdc5d1b63de5b2865a0c5cf24d4655fc1 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 22 Feb 2021 22:14:15 +0100 Subject: [PATCH 1/8] Add cams.get_cams_radiation function --- docs/sphinx/source/api.rst | 1 + docs/sphinx/source/whatsnew/v0.9.0.rst | 3 + pvlib/iotools/__init__.py | 1 + pvlib/iotools/cams.py | 207 +++++++++++++++++++++++++ 4 files changed, 212 insertions(+) create mode 100644 pvlib/iotools/cams.py diff --git a/docs/sphinx/source/api.rst b/docs/sphinx/source/api.rst index 8805d199a4..31204b6f0d 100644 --- a/docs/sphinx/source/api.rst +++ b/docs/sphinx/source/api.rst @@ -484,6 +484,7 @@ relevant to solar energy modeling. iotools.get_pvgis_tmy iotools.read_pvgis_tmy iotools.read_bsrn + iotools.get_cams_mcclear A :py:class:`~pvlib.location.Location` object may be created from metadata in some files. diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 81e7a0c60b..a4e2688bb0 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -64,6 +64,9 @@ Enhancements ~~~~~~~~~~~~ * Add :func:`~pvlib.iotools.read_bsrn` for reading BSRN solar radiation data files. (:pull:`1145`, :issue:`1015`) +* Add :func:`~pvlib.iotools.get_cams_radiation` for retrieving CAMS McClear + clear-sky radiation time series. + files. (:pull:`1145`, :issue:`1015`) * In :py:class:`~pvlib.modelchain.ModelChain`, attributes which contain output of models are now collected into ``ModelChain.results``. 
(:pull:`1076`, :issue:`1067`) diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py index ba5d5e8807..737ee66d4d 100644 --- a/pvlib/iotools/__init__.py +++ b/pvlib/iotools/__init__.py @@ -14,3 +14,4 @@ from pvlib.iotools.psm3 import parse_psm3 # noqa: F401 from pvlib.iotools.pvgis import get_pvgis_tmy, read_pvgis_tmy # noqa: F401 from pvlib.iotools.bsrn import read_bsrn # noqa: F401 +from pvlib.iotools.cams import get_cams_radiation # noqa: F401 diff --git a/pvlib/iotools/cams.py b/pvlib/iotools/cams.py new file mode 100644 index 0000000000..c802420623 --- /dev/null +++ b/pvlib/iotools/cams.py @@ -0,0 +1,207 @@ +"""Functions to access data from Copernicus Atmosphere Monitoring Service + (CAMS) radiation service. +.. codeauthor:: Adam R. Jensen +""" + +import pandas as pd +import requests +import io + + +MCCLEAR_COLUMNS = ['Observation period', 'TOA', 'Clear sky GHI', + 'Clear sky BHI', 'Clear sky DHI', 'Clear sky BNI'] + +MCCLEAR_VERBOSE_COLUMNS = ['sza', 'summer/winter split', 'tco3', 'tcwv', + 'AOD BC', 'AOD DU', 'AOD SS', 'AOD OR', 'AOD SU', + 'AOD NI', 'AOD AM', 'alpha', 'Aerosol type', + 'fiso', 'fvol', 'fgeo', 'albedo'] + +# Dictionary mapping CAMS MCCLEAR variables to pvlib names +MCCLEAR_VARIABLE_MAP = { + 'TOA': 'ghi_extra', + 'Clear sky GHI': 'ghi_clear', + 'Clear sky BHI': 'bhi_clear', + 'Clear sky DHI': 'dhi_clear', + 'Clear sky BNI': 'dni_clear', + 'sza': 'solar_zenith', +} + + +# Dictionary mapping Python time steps to CAMS time step format +TIME_STEPS = {'1min': 'PT01M', '15min': 'PT15M', '1h': 'PT01H', '1d': 'P01D', + '1M': 'P01M'} + +TIME_STEPS_HOURS = {'1min': 1/60, '15min': 15/60, '1h': 1, '1d': 24} + + +def get_cams_mcclear(start_date, end_date, latitude, longitude, email, + altitude=None, time_step='1h', time_ref='UT', + integrated=False, label=None, verbose=False, + map_variables=True, server='www.soda-is.com'): + """ + Retrieve time-series of clear-sky global, beam, and diffuse radiation + anywhere in the world from CAMS McClear 
[1]_ using the WGET service [2]_. + + + Geographical coverage: wordwide + Time coverage: 2004-01-01 to two days ago + Access: free, but requires registration, see [1]_ + Requests: max. 100 per day + + + Parameters + ---------- + start_date: datetime like + First day of the requested period + end_date: datetime like + Last day of the requested period + latitude: float + in decimal degrees, between -90 and 90, north is positive (ISO 19115) + longitude : float + in decimal degrees, between -180 and 180, east is positive (ISO 19115) + altitude: float, default: None + Altitude in meters. If None, then the altitude is determined from the + NASA SRTM database + email: str + Email address linked to a SoDa account + time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h' + Time step of the time series, either 1 minute, 15 minute, hourly, + daily, or monthly. + time_reference: str, {'UT', 'TST'}, default: 'UT' + 'UT' (universal time) or 'TST' (True Solar Time) + integrated: boolean, default False + Whether to return integrated irradiation values (Wh/m^2) from CAMS or + average irradiance values (W/m^2) as is more commonly used + label: {‘right’, ‘left’}, default: None + Which bin edge label to label bucket with. The default is ‘left’ for + all frequency offsets except for ‘M’ which has a default of ‘right’. + verbose: boolean, default: False + Verbose mode outputs additional parameters (aerosols). Only avaiable + for 1 minute and universal time. See [1] for parameter description. + map_variables: bool, default: True + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable MCCLEAR_VARIABLE_MAP. 
+ server: str, default: 'www.soda-is.com' + Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com) + + + Notes + ---------- + The returned data Dataframe includes the following fields: + + ======================= ====== ========================================== + Key, mapped key Format Description + ======================= ====== ========================================== + **Mapped field names are returned when the map_variables argument is True** + -------------------------------------------------------------------------- + Observation period str Beginning/end of time period + TOA, ghi_extra float Horizontal radiation at top of atmosphere + Clear sky GHI, ghi_clear float Clear sky global radiation on horizontal + Clear sky BHI, bhi_clear float Clear sky beam radiation on horizontal + Clear sky DHI, dhi_clear float Clear sky diffuse radiation on horizontal + Clear sky BNI, dni_clear float Clear sky beam radiation normal to sun + ======================= ====== ========================================== + + For the returned units see the integrated argument. For description of + additional output parameters in verbose mode, see [1]. + + Note that it is recommended to specify the latitude and longitude to at + least the fourth decimal place. + + Variables corresponding to standard pvlib variables are renamed, + e.g. `sza` becomes `solar_zenith`. See the + `pvlib.iotools.cams.MCCLEAR_VARIABLE_MAP` dict for the complete mapping. + + + References + ---------- + .. [1] `CAMS McClear Service Info + `_ + .. [2] `CAMS McClear Automatic Access + `_ + """ + + if time_step in TIME_STEPS.keys(): + time_step_str = TIME_STEPS[time_step] + else: + print('WARNING: time step not recognized, 1 hour time step used!') + time_step_str = 'PT01H' + + names = MCCLEAR_COLUMNS + if verbose: + if (time_step == '1min') & (time_ref == 'UT'): + names += MCCLEAR_VERBOSE_COLUMNS + else: + verbose = False + print("Verbose mode only supports 1 min. 
UT time series!") + + if altitude is None: # Let SoDa get elevation from the NASA SRTM database + altitude = -999 + + # Start and end date should be in the format: yyyy-mm-dd + start_date = start_date.strftime('%Y-%m-%d') + end_date = end_date.strftime('%Y-%m-%d') + + email = email.replace('@', '%2540') # Format email address + + # Format verbose variable to the required format: {'true', 'false'} + verbose = str(verbose).lower() + + # Manual format the request url, due to uncommon usage of & and ; in url + url = ("http://{}/service/wps?Service=WPS&Request=Execute&" + "Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation&" + "DataInputs=latitude={};longitude={};altitude={};" + "date_begin={};date_end={};time_ref={};summarization={};" + "username={};verbose={}" + ).format(server, latitude, longitude, altitude, start_date, + end_date, time_ref, time_step_str, email, verbose) + + res = requests.get(url) + + # Invalid requests returns helpful XML error message + if res.headers['Content-Type'] == 'application/xml': + print('REQUEST ERROR MESSAGE:') + print(res.text.split('ows:ExceptionText')[1][1:-2]) + + # Check if returned file is a csv data file + elif res.headers['Content-Type'] == 'application/csv': + data = pd.read_csv(io.StringIO(res.content.decode('utf-8')), sep=';', + comment='#', header=None, names=names) + + obs_period = data['Observation period'].str.split('/') + + # Set index as the start observation time (left) and localize to UTC + if (label == 'left') | ((label is None) & (time_step != '1M')): + data.index = pd.to_datetime(obs_period.str[0], utc=True) + # Set index as the stop observation time (right) and localize to UTC + elif (label == 'right') | ((label is None) & (time_step == '1M')): + data.index = pd.to_datetime(obs_period.str[1], utc=True) + + data.index.name = None # Set index name to None + + # Change index for '1d' and '1M' to be date and not datetime + if time_step == '1d': + data.index = data.index.date + elif (time_step == '1M') & 
(label is not None): + data.index = data.index.date + # For monthly data with 'right' label, the index should be the last + # date of the month and not the first date of the following month + elif (time_step == '1M') & (time_step != 'left'): + data.index = data.index.date - pd.Timestamp(days=1) + + if not integrated: # Convert from Wh/m2 to W/m2 + integrated_cols = MCCLEAR_COLUMNS[1:6] + + if time_step == '1M': + time_delta = (pd.to_datetime(obs_period.str[1]) + - pd.to_datetime(obs_period.str[0])) + hours = time_delta.dt.total_seconds()/60/60 + data[integrated_cols] = data[integrated_cols] / hours + else: + data[integrated_cols] = (data[integrated_cols] / + TIME_STEPS_HOURS[time_step]) + + if map_variables: + data = data.rename(columns=MCCLEAR_VARIABLE_MAP) + + return data From 510f08ef8b2d0ee543c197a1433c6294ce410cde Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 22 Feb 2021 22:14:29 +0100 Subject: [PATCH 2/8] Revert "Add cams.get_cams_radiation function" This reverts commit d7deb80cdc5d1b63de5b2865a0c5cf24d4655fc1. --- docs/sphinx/source/api.rst | 1 - docs/sphinx/source/whatsnew/v0.9.0.rst | 3 - pvlib/iotools/__init__.py | 1 - pvlib/iotools/cams.py | 207 ------------------------- 4 files changed, 212 deletions(-) delete mode 100644 pvlib/iotools/cams.py diff --git a/docs/sphinx/source/api.rst b/docs/sphinx/source/api.rst index 31204b6f0d..8805d199a4 100644 --- a/docs/sphinx/source/api.rst +++ b/docs/sphinx/source/api.rst @@ -484,7 +484,6 @@ relevant to solar energy modeling. iotools.get_pvgis_tmy iotools.read_pvgis_tmy iotools.read_bsrn - iotools.get_cams_mcclear A :py:class:`~pvlib.location.Location` object may be created from metadata in some files. 
diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index a4e2688bb0..81e7a0c60b 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -64,9 +64,6 @@ Enhancements ~~~~~~~~~~~~ * Add :func:`~pvlib.iotools.read_bsrn` for reading BSRN solar radiation data files. (:pull:`1145`, :issue:`1015`) -* Add :func:`~pvlib.iotools.get_cams_radiation` for retrieving CAMS McClear - clear-sky radiation time series. - files. (:pull:`1145`, :issue:`1015`) * In :py:class:`~pvlib.modelchain.ModelChain`, attributes which contain output of models are now collected into ``ModelChain.results``. (:pull:`1076`, :issue:`1067`) diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py index 737ee66d4d..ba5d5e8807 100644 --- a/pvlib/iotools/__init__.py +++ b/pvlib/iotools/__init__.py @@ -14,4 +14,3 @@ from pvlib.iotools.psm3 import parse_psm3 # noqa: F401 from pvlib.iotools.pvgis import get_pvgis_tmy, read_pvgis_tmy # noqa: F401 from pvlib.iotools.bsrn import read_bsrn # noqa: F401 -from pvlib.iotools.cams import get_cams_radiation # noqa: F401 diff --git a/pvlib/iotools/cams.py b/pvlib/iotools/cams.py deleted file mode 100644 index c802420623..0000000000 --- a/pvlib/iotools/cams.py +++ /dev/null @@ -1,207 +0,0 @@ -"""Functions to access data from Copernicus Atmosphere Monitoring Service - (CAMS) radiation service. -.. codeauthor:: Adam R. 
Jensen -""" - -import pandas as pd -import requests -import io - - -MCCLEAR_COLUMNS = ['Observation period', 'TOA', 'Clear sky GHI', - 'Clear sky BHI', 'Clear sky DHI', 'Clear sky BNI'] - -MCCLEAR_VERBOSE_COLUMNS = ['sza', 'summer/winter split', 'tco3', 'tcwv', - 'AOD BC', 'AOD DU', 'AOD SS', 'AOD OR', 'AOD SU', - 'AOD NI', 'AOD AM', 'alpha', 'Aerosol type', - 'fiso', 'fvol', 'fgeo', 'albedo'] - -# Dictionary mapping CAMS MCCLEAR variables to pvlib names -MCCLEAR_VARIABLE_MAP = { - 'TOA': 'ghi_extra', - 'Clear sky GHI': 'ghi_clear', - 'Clear sky BHI': 'bhi_clear', - 'Clear sky DHI': 'dhi_clear', - 'Clear sky BNI': 'dni_clear', - 'sza': 'solar_zenith', -} - - -# Dictionary mapping Python time steps to CAMS time step format -TIME_STEPS = {'1min': 'PT01M', '15min': 'PT15M', '1h': 'PT01H', '1d': 'P01D', - '1M': 'P01M'} - -TIME_STEPS_HOURS = {'1min': 1/60, '15min': 15/60, '1h': 1, '1d': 24} - - -def get_cams_mcclear(start_date, end_date, latitude, longitude, email, - altitude=None, time_step='1h', time_ref='UT', - integrated=False, label=None, verbose=False, - map_variables=True, server='www.soda-is.com'): - """ - Retrieve time-series of clear-sky global, beam, and diffuse radiation - anywhere in the world from CAMS McClear [1]_ using the WGET service [2]_. - - - Geographical coverage: wordwide - Time coverage: 2004-01-01 to two days ago - Access: free, but requires registration, see [1]_ - Requests: max. 100 per day - - - Parameters - ---------- - start_date: datetime like - First day of the requested period - end_date: datetime like - Last day of the requested period - latitude: float - in decimal degrees, between -90 and 90, north is positive (ISO 19115) - longitude : float - in decimal degrees, between -180 and 180, east is positive (ISO 19115) - altitude: float, default: None - Altitude in meters. 
If None, then the altitude is determined from the - NASA SRTM database - email: str - Email address linked to a SoDa account - time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h' - Time step of the time series, either 1 minute, 15 minute, hourly, - daily, or monthly. - time_reference: str, {'UT', 'TST'}, default: 'UT' - 'UT' (universal time) or 'TST' (True Solar Time) - integrated: boolean, default False - Whether to return integrated irradiation values (Wh/m^2) from CAMS or - average irradiance values (W/m^2) as is more commonly used - label: {‘right’, ‘left’}, default: None - Which bin edge label to label bucket with. The default is ‘left’ for - all frequency offsets except for ‘M’ which has a default of ‘right’. - verbose: boolean, default: False - Verbose mode outputs additional parameters (aerosols). Only avaiable - for 1 minute and universal time. See [1] for parameter description. - map_variables: bool, default: True - When true, renames columns of the Dataframe to pvlib variable names - where applicable. See variable MCCLEAR_VARIABLE_MAP. 
- server: str, default: 'www.soda-is.com' - Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com) - - - Notes - ---------- - The returned data Dataframe includes the following fields: - - ======================= ====== ========================================== - Key, mapped key Format Description - ======================= ====== ========================================== - **Mapped field names are returned when the map_variables argument is True** - -------------------------------------------------------------------------- - Observation period str Beginning/end of time period - TOA, ghi_extra float Horizontal radiation at top of atmosphere - Clear sky GHI, ghi_clear float Clear sky global radiation on horizontal - Clear sky BHI, bhi_clear float Clear sky beam radiation on horizontal - Clear sky DHI, dhi_clear float Clear sky diffuse radiation on horizontal - Clear sky BNI, dni_clear float Clear sky beam radiation normal to sun - ======================= ====== ========================================== - - For the returned units see the integrated argument. For description of - additional output parameters in verbose mode, see [1]. - - Note that it is recommended to specify the latitude and longitude to at - least the fourth decimal place. - - Variables corresponding to standard pvlib variables are renamed, - e.g. `sza` becomes `solar_zenith`. See the - `pvlib.iotools.cams.MCCLEAR_VARIABLE_MAP` dict for the complete mapping. - - - References - ---------- - .. [1] `CAMS McClear Service Info - `_ - .. [2] `CAMS McClear Automatic Access - `_ - """ - - if time_step in TIME_STEPS.keys(): - time_step_str = TIME_STEPS[time_step] - else: - print('WARNING: time step not recognized, 1 hour time step used!') - time_step_str = 'PT01H' - - names = MCCLEAR_COLUMNS - if verbose: - if (time_step == '1min') & (time_ref == 'UT'): - names += MCCLEAR_VERBOSE_COLUMNS - else: - verbose = False - print("Verbose mode only supports 1 min. 
UT time series!") - - if altitude is None: # Let SoDa get elevation from the NASA SRTM database - altitude = -999 - - # Start and end date should be in the format: yyyy-mm-dd - start_date = start_date.strftime('%Y-%m-%d') - end_date = end_date.strftime('%Y-%m-%d') - - email = email.replace('@', '%2540') # Format email address - - # Format verbose variable to the required format: {'true', 'false'} - verbose = str(verbose).lower() - - # Manual format the request url, due to uncommon usage of & and ; in url - url = ("http://{}/service/wps?Service=WPS&Request=Execute&" - "Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation&" - "DataInputs=latitude={};longitude={};altitude={};" - "date_begin={};date_end={};time_ref={};summarization={};" - "username={};verbose={}" - ).format(server, latitude, longitude, altitude, start_date, - end_date, time_ref, time_step_str, email, verbose) - - res = requests.get(url) - - # Invalid requests returns helpful XML error message - if res.headers['Content-Type'] == 'application/xml': - print('REQUEST ERROR MESSAGE:') - print(res.text.split('ows:ExceptionText')[1][1:-2]) - - # Check if returned file is a csv data file - elif res.headers['Content-Type'] == 'application/csv': - data = pd.read_csv(io.StringIO(res.content.decode('utf-8')), sep=';', - comment='#', header=None, names=names) - - obs_period = data['Observation period'].str.split('/') - - # Set index as the start observation time (left) and localize to UTC - if (label == 'left') | ((label is None) & (time_step != '1M')): - data.index = pd.to_datetime(obs_period.str[0], utc=True) - # Set index as the stop observation time (right) and localize to UTC - elif (label == 'right') | ((label is None) & (time_step == '1M')): - data.index = pd.to_datetime(obs_period.str[1], utc=True) - - data.index.name = None # Set index name to None - - # Change index for '1d' and '1M' to be date and not datetime - if time_step == '1d': - data.index = data.index.date - elif (time_step == '1M') & 
(label is not None): - data.index = data.index.date - # For monthly data with 'right' label, the index should be the last - # date of the month and not the first date of the following month - elif (time_step == '1M') & (time_step != 'left'): - data.index = data.index.date - pd.Timestamp(days=1) - - if not integrated: # Convert from Wh/m2 to W/m2 - integrated_cols = MCCLEAR_COLUMNS[1:6] - - if time_step == '1M': - time_delta = (pd.to_datetime(obs_period.str[1]) - - pd.to_datetime(obs_period.str[0])) - hours = time_delta.dt.total_seconds()/60/60 - data[integrated_cols] = data[integrated_cols] / hours - else: - data[integrated_cols] = (data[integrated_cols] / - TIME_STEPS_HOURS[time_step]) - - if map_variables: - data = data.rename(columns=MCCLEAR_VARIABLE_MAP) - - return data From 2b395ee5c99695160fe5cab66e99cda70a196dd8 Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Thu, 19 May 2022 11:18:36 +0200 Subject: [PATCH 3/8] Allow parsing of http files --- pvlib/iotools/surfrad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/iotools/surfrad.py b/pvlib/iotools/surfrad.py index b819923497..fdc406b748 100644 --- a/pvlib/iotools/surfrad.py +++ b/pvlib/iotools/surfrad.py @@ -123,7 +123,7 @@ def read_surfrad(filename, map_variables=True): .. [2] NOAA SURFRAD Data Archive `SURFRAD Archive `_ """ - if str(filename).startswith('ftp'): + if str(filename).startswith('ftp') or str(filename).startswith('http'): req = Request(filename) response = urlopen(req) file_buffer = io.StringIO(response.read().decode(errors='ignore')) From b2970003a1ebe947be88eac4d600f1cfc9acfa2b Mon Sep 17 00:00:00 2001 From: "Adam R. 
Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Thu, 19 May 2022 11:18:45 +0200 Subject: [PATCH 4/8] Add test for https file --- pvlib/tests/iotools/test_surfrad.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pvlib/tests/iotools/test_surfrad.py b/pvlib/tests/iotools/test_surfrad.py index 6ef9fcab51..83f7ec7645 100644 --- a/pvlib/tests/iotools/test_surfrad.py +++ b/pvlib/tests/iotools/test_surfrad.py @@ -7,6 +7,8 @@ testfile = DATA_DIR / 'surfrad-slv16001.dat' network_testfile = ('ftp://aftp.cmdl.noaa.gov/data/radiation/surfrad/' 'Alamosa_CO/2016/slv16001.dat') +https_testfile = ('https://gml.noaa.gov/aftp/data/radiation/surfrad/' + 'Alamosa_CO/2016/slv16001.dat') @pytest.mark.remote_data @@ -19,6 +21,17 @@ def test_read_surfrad_network(): assert local_data.equals(network_data) +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_read_surfrad_https(): + # Test reading of https files. + # If this test begins failing, SURFRAD's data structure or data + # archive may have changed. + local_data, _ = surfrad.read_surfrad(testfile) + network_data, _ = surfrad.read_surfrad(https_testfile) + assert local_data.equals(network_data) + + def test_read_surfrad_columns_no_map(): data, _ = surfrad.read_surfrad(testfile, map_variables=False) assert 'zen' in data.columns From b313c64d03b834186b4af8c9a890d01ec6440e95 Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Thu, 19 May 2022 15:31:54 +0200 Subject: [PATCH 5/8] Squashed commit of the following: MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit 5047b26c7d0c8b1bb65c397b32839b641c00f452 Author: Prajwal Borkar <48290911+PrajwalBorkar@users.noreply.github.com> Date: Tue May 17 19:14:53 2022 +0530 Updated get_cams protocol to https #1457 (#1458) * Updated get_cams protocol to https #1457 * Updated instances of http to https. 
#1457 * Updated documentation links to https * Added Contributor commit a0812b12584cfd5e662fa5aeb8972090763a671f Author: roger-lcc <58332996+roger-lcc@users.noreply.github.com> Date: Wed May 4 20:01:42 2022 +0800 CI asv check (#1454) * CI asv check * added CI asv check * CI asv check * CI asv check * updated CI asv check * Update docs/sphinx/source/whatsnew/v0.9.2.rst updated v0.9.2.rst Co-authored-by: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Co-authored-by: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> commit 83e379a1cf1d82cf85534feeb32f1f08cae583fb Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Thu Apr 28 19:26:09 2022 -0400 Bump pandas to 0.25.0; test updates (#1448) * bump pandas min from 0.22.0 to 0.25.0 * fix buggy test__check_pandas_assert_kwargs don't use monkeypatch and mocker in the same test function. https://github.com/pytest-dev/pytest-mock/issues/289 * fix psm3 test (apparent_zenith -> solar_zenith) * whatsnew * better UTC conversion in sun_rise_set_transit_ephem * helpful comments * more whatsnew * '3.0' -> '3' in read_crn test? 
* apply dtypes during parsing in read_crn * move dropna() post-processing into read_fwf call * fix read_crn for pandas<1.2.0 * Update pvlib/solarposition.py Co-authored-by: Will Holmgren * nix pytz * UTC -> utc * address simd arccos issue in tracking.singleaxis Co-authored-by: Will Holmgren commit 8d0f863da92739669e01ac4da4145e4547638b50 Author: Naman Priyadarshi <77211855+Naman-Priyadarshi@users.noreply.github.com> Date: Tue Apr 12 22:55:58 2022 +0530 Advance numba from 0.36.1 to 0.40.0 in asv py3.6 environment (#1440) * Advance numba from 0.36.1 to 0.40.0 * Advance numba from 0.36.1 to 0.40.0 * Updated whatsnew.rst commit 5cb695d2a5d2edbadb41e876858263e81a1f443c Author: Naman Priyadarshi <77211855+Naman-Priyadarshi@users.noreply.github.com> Date: Wed Apr 6 23:58:03 2022 +0530 Remove unnecessary **kwargs from spa_python and get_total_irradiance (#1437) * Update Solarposition.py Removed **kwargs from pvlib.solarposition.spa_python * Added v0.9.2.rst, changes in pvlib/irradiance.py and pvlib/location.py Made new v0.9.2.rst and removed **kwargs from pvlib/irradiance.py (Line 309) and pvlib/location.py (Line 234-235) * Update docs/sphinx/source/whatsnew/v0.9.2.rst * Update docs/sphinx/source/whatsnew/v0.9.2.rst Co-authored-by: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> commit 8460b360052c9eff3abdfd681a83ae062247a38a Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Tue Mar 29 15:31:25 2022 -0600 Finalize 0.9.1 (#1431) * fix heading levels in user_guide/bifacial.rst * whatsnew cleanup * fix readme html missing tag, maybe unicode zero-width spaces? 
* readme: link to universal zenodo doi * readme: update installation link for #1173 * whatsnew date * additional contributors * delete errant space commit edbf2a68942cbc1ef725ceffd4a356e8d0228258 Author: RoyCoding8 <92641125+RoyCoding8@users.noreply.github.com> Date: Wed Mar 30 01:58:18 2022 +0530 Updated plot_singlediode.py (#1434) * Update plot_singlediode.py Changed the unit from C to degree C (°C) * Update plot_singlediode.py Changed to LaTeX \degree symbol in matplotlib which avoids any encoding issues with using Unicode characters. * Update v0.9.1.rst Added name to the contributors' list * Update v0.9.1.rst commit cf4a8adc8200be5c59e575483ffb0c37bd838e09 Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Tue Mar 29 14:04:40 2022 -0600 Update sphinx to 4.5.0 (#1435) * bump sphinx and pydata-sphinx-theme versions * clean up sphinx conf.py * fix distutils strangeness, maybe * use freshly-released sphinx==4.5.0 commit 884a15308ff75055510cd74053ba202067533155 Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Wed Mar 23 13:41:35 2022 -0600 Clarify delta_t docstring descriptions (#1429) * clarify delta_t docstrings * whatsnew commit c2431832040f585969bbd1868f6acdd6ddb4e0a5 Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Thu Mar 17 12:01:57 2022 -0600 Deprecate pvlib.forecast (#1426) * deprecate pvlib.forecast classes * catch warnings in tests * add warning admonition to forecasts.rst * whatsnew * stickler * pin pytest < 7.1.0 * pin pytest in the right place this time * more warning suppression in tests * unpin pytest * Update docs/sphinx/source/whatsnew/v0.9.1.rst * copy warning to reference/forecasting.rst commit e3baa126eb832a27b5faa128f57d37cf268fd8f2 Author: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> Date: Thu Mar 17 11:28:56 2022 -0600 Fix conditional dependency on dataclasses (#1422) * better conditional dependency on dataclasses * whatsnew commit 
27cba7ae3fd8a5d9ad16553f6059a174630479b7 Author: Naman Priyadarshi <77211855+Naman-Priyadarshi@users.noreply.github.com> Date: Thu Mar 17 22:48:08 2022 +0530 Added asv benchmarking badge to the table of badges in the main README. (#1427) * Update Readme.md Added benchmarks asv badge to the badge section * Updated v.0.9.1.rst Added my name to the list of Contributers. commit 1893b20a7b755004f561037161c242db24e2870c Author: Adam R. Jensen <39184289+AdamRJensen@users.noreply.github.com> Date: Mon Mar 14 18:37:58 2022 +0100 Add variable mapping of psm3 (#1374) * Add variable mapping of psm3 * Add enhancement entry in whatsnew * Fix stickler * Map keys in metadata dict * Remove double spaces in docs * Fix stickler * Doc update Co-authored-by: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> * Reformatting - changes by kanderso-nrel * Update docstring table with 2020 * Add deprecation warning test coverage * Rename to VARIABLE_MAP * Change apparent_zenith to solar_zenith Based on the decision in #1403 * Update attributes docstring * Change elevation to altitude when mapping variables * Update psm3 variable mapping test Co-authored-by: Kevin Anderson <57452607+kanderso-nrel@users.noreply.github.com> --- .github/workflows/asv_check.yml | 34 ++++++++ README.md | 18 +++-- benchmarks/asv.conf.json | 4 +- ci/requirements-py36-min.yml | 2 +- ci/requirements-py36.yml | 2 +- ci/requirements-py37.yml | 2 +- ci/requirements-py38.yml | 2 +- ci/requirements-py39.yml | 2 +- docs/examples/iv-modeling/plot_singlediode.py | 2 +- docs/sphinx/source/conf.py | 7 +- docs/sphinx/source/reference/forecasting.rst | 6 ++ docs/sphinx/source/user_guide/bifacial.rst | 4 +- docs/sphinx/source/user_guide/forecasts.rst | 19 +++++ docs/sphinx/source/whatsnew/v0.9.1.rst | 30 ++++--- docs/sphinx/source/whatsnew/v0.9.2.rst | 42 ++++++++++ pvlib/forecast.py | 15 ++++ pvlib/iotools/crn.py | 18 ++++- pvlib/iotools/psm3.py | 79 +++++++++++++++---- pvlib/iotools/sodapro.py | 16 ++-- 
pvlib/irradiance.py | 2 +- pvlib/location.py | 3 +- pvlib/solarposition.py | 36 ++++----- pvlib/spa.py | 10 +-- pvlib/tests/iotools/test_crn.py | 2 +- pvlib/tests/iotools/test_psm3.py | 56 +++++++++++-- pvlib/tests/iotools/test_sodapro.py | 4 +- pvlib/tests/test_conftest.py | 19 +++-- pvlib/tests/test_forecast.py | 44 +++++++---- pvlib/tracking.py | 3 + setup.py | 13 ++- 30 files changed, 372 insertions(+), 124 deletions(-) create mode 100644 .github/workflows/asv_check.yml create mode 100644 docs/sphinx/source/whatsnew/v0.9.2.rst diff --git a/.github/workflows/asv_check.yml b/.github/workflows/asv_check.yml new file mode 100644 index 0000000000..db0f11f4da --- /dev/null +++ b/.github/workflows/asv_check.yml @@ -0,0 +1,34 @@ +name: asv + +# CI ASV CHECK is aimed to verify that the benchmarks execute without error. +on: [pull_request, push] + +jobs: + quick: + runs-on: ubuntu-latest + defaults: + run: + shell: bash -el {0} + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install Python + uses: actions/setup-python@v3 + with: + python-version: '3.9.7' + + - name: Install asv + run: pip install asv==0.4.2 + + - name: Run asv benchmarks + run: | + cd benchmarks + asv machine --yes + asv run HEAD^! --quick --dry-run --show-stderr | sed "/failed$/ s/^/##[error]/" | tee benchmarks.log + if grep "failed" benchmarks.log > /dev/null ; then + exit 1 + fi + diff --git a/README.md b/README.md index 2911388d29..211eb7ff95 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Code Quality -   + lgtm quality grade @@ -46,7 +46,7 @@ Coverage -   + coveralls coverage @@ -55,11 +55,19 @@ + + Benchmarks + + + + + + Publications - - zenodo reference + + zenodo reference JOSS reference @@ -100,7 +108,7 @@ Installation ============ pvlib-python releases may be installed using the ``pip`` and ``conda`` tools. -Please see the [Installation page](http://pvlib-python.readthedocs.io/en/stable/installation.html) of the documentation for complete instructions. 
+Please see the [Installation page](https://pvlib-python.readthedocs.io/en/stable/user_guide/installation.html) of the documentation for complete instructions. Contributing diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index c7da42acf2..79773e928f 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -116,12 +116,12 @@ { "python": "3.6", "numpy": "1.16.0", - "pandas": "0.22.0", + "pandas": "0.25.0", "scipy": "1.2.0", // Note: these don't have a minimum in setup.py "h5py": "2.10.0", "ephem": "3.7.6.0", - "numba": "0.36.1", + "numba": "0.40.0", }, // latest versions available { diff --git a/ci/requirements-py36-min.yml b/ci/requirements-py36-min.yml index 495099cf8f..dfff0a9f97 100644 --- a/ci/requirements-py36-min.yml +++ b/ci/requirements-py36-min.yml @@ -16,7 +16,7 @@ dependencies: - dataclasses - h5py==3.1.0 - numpy==1.16.0 - - pandas==0.22.0 + - pandas==0.25.0 - scipy==1.2.0 - pytest-rerunfailures # conda version is >3.6 - pytest-remotedata # conda package is 0.3.0, needs > 0.3.1 diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index 295bedda7c..596a2e0bbd 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.22.0 + - pandas >= 0.25.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 241cafe128..8a971d728a 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.22.0 + - pandas >= 0.25.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index 6ff5d2da0f..db763c3d73 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.22.0 + - pandas >= 0.25.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index 
32abde067a..3621a737b6 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -11,7 +11,7 @@ dependencies: - nose # - numba # python 3.9 compat in early 2021 - numpy >= 1.16.0 - - pandas >= 0.22.0 + - pandas >= 0.25.0 - pip - pytest - pytest-cov diff --git a/docs/examples/iv-modeling/plot_singlediode.py b/docs/examples/iv-modeling/plot_singlediode.py index c92d5e999e..e9085b3341 100644 --- a/docs/examples/iv-modeling/plot_singlediode.py +++ b/docs/examples/iv-modeling/plot_singlediode.py @@ -103,7 +103,7 @@ for i, case in conditions.iterrows(): label = ( "$G_{eff}$ " + f"{case['Geff']} $W/m^2$\n" - "$T_{cell}$ " + f"{case['Tcell']} $C$" + "$T_{cell}$ " + f"{case['Tcell']} $\\degree C$" ) plt.plot(curve_info['v'][i], curve_info['i'][i], label=label) v_mp = curve_info['v_mp'][i] diff --git a/docs/sphinx/source/conf.py b/docs/sphinx/source/conf.py index 9826781d6d..fb5228c332 100644 --- a/docs/sphinx/source/conf.py +++ b/docs/sphinx/source/conf.py @@ -20,6 +20,9 @@ # for generating GH links with linenumbers import inspect +# import distutils before calling pd.show_versions() +# https://github.com/pypa/setuptools/issues/3044 +import distutils # noqa: F401 import pandas as pd pd.show_versions() @@ -244,7 +247,7 @@ def setup(app): # In-line links to references as numbers in brackets. app.add_css_file("reference_format.css") # Add a warning banner at the top of the page if viewing the "latest" docs - app.add_javascript("version-alert.js") + app.add_js_file("version-alert.js") # -- Options for LaTeX output --------------------------------------------- @@ -343,8 +346,6 @@ def setup(app): 'matplotlib': ('https://matplotlib.org/stable', None), } -nbsphinx_allow_errors = True - ipython_warning_is_error = False # suppress "WARNING: Footnote [1] is not referenced." 
messages diff --git a/docs/sphinx/source/reference/forecasting.rst b/docs/sphinx/source/reference/forecasting.rst index ff4df7ed4d..8b1e125c8a 100644 --- a/docs/sphinx/source/reference/forecasting.rst +++ b/docs/sphinx/source/reference/forecasting.rst @@ -3,6 +3,12 @@ Forecasting =========== +.. warning:: + + All functionality in the ``pvlib.forecast`` module is deprecated as of + pvlib v0.9.1. For details, see :ref:`forecasts`. + + Forecast models --------------- diff --git a/docs/sphinx/source/user_guide/bifacial.rst b/docs/sphinx/source/user_guide/bifacial.rst index ec18d04a5a..b0299041ed 100644 --- a/docs/sphinx/source/user_guide/bifacial.rst +++ b/docs/sphinx/source/user_guide/bifacial.rst @@ -24,7 +24,7 @@ irradiance: pvfactors -========= +--------- The `pvfactors `_ package calculates incident irradiance on the front and back surfaces of an array. pvfactors uses @@ -35,7 +35,7 @@ trackers. Infinite Sheds -============== +-------------- The "infinite sheds" model [1] is a 2-dimensional model of irradiance on the front and rear surfaces of a PV array. The model assumes that the array diff --git a/docs/sphinx/source/user_guide/forecasts.rst b/docs/sphinx/source/user_guide/forecasts.rst index a89904eccd..d61b40387a 100644 --- a/docs/sphinx/source/user_guide/forecasts.rst +++ b/docs/sphinx/source/user_guide/forecasts.rst @@ -4,6 +4,25 @@ Forecasting *********** +.. warning:: + + The ``pvlib.forecast`` module is deprecated as of version ``0.9.1``. + + Because none of the current pvlib team members are able to continue + maintaining it, the functionality in ``pvlib.forecast`` is deprecated + and will be removed without replacement in a future version. If you + are interested in maintaining this functionality, please let us know. 
+ + You can fetch forecast data yourself using ``siphon`` (see the + docs below this warning) and the code from pvlib v0.9.0 as a reference: + https://github.com/pvlib/pvlib-python/blob/v0.9.0/pvlib/forecast.py + + The `Solar Forecast Arbiter Core + `_ + offers similar (and more robust) forecast processing functionality + and may be a suitable replacement for some users. + + pvlib python provides a set of functions and classes that make it easy to obtain weather forecast data and convert that data into a PV power forecast. Users can retrieve standardized weather forecast data relevant diff --git a/docs/sphinx/source/whatsnew/v0.9.1.rst b/docs/sphinx/source/whatsnew/v0.9.1.rst index 685a7880a4..2440f215c7 100644 --- a/docs/sphinx/source/whatsnew/v0.9.1.rst +++ b/docs/sphinx/source/whatsnew/v0.9.1.rst @@ -1,25 +1,26 @@ .. _whatsnew_0910: -v0.9.1 (TBD) --------------------------- - -Breaking changes -~~~~~~~~~~~~~~~~ +v0.9.1 (March 29, 2022) +----------------------- Deprecations ~~~~~~~~~~~~ * Moved :py:func:`pvlib.bifacial.pvfactors_timeseries` to :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries`. - :py:module:`pvlib.bifacial` is now a sub-package. (:pull:`717`) + :py:mod:`pvlib.bifacial` is now a sub-package. (:pull:`717`) * :py:func:`pvlib.modelchain.basic_chain` is deprecated. See :py:meth:`pvlib.modelchain.ModelChain.with_pvwatts` and :py:meth:`pvlib.modelchain.ModelChain.with_sapm` for alternative simplified :py:class:`~pvlib.modelchain.ModelChain` interfaces, although note that the inputs do not directly translate. (:pull:`1401`) +* All functionality in the ``pvlib.forecast`` module is deprecated. + For details, see :ref:`forecasts`. 
(:issue:`1057`, :pull:`1426`) Enhancements ~~~~~~~~~~~~ -* Added `pvlib.bifacial.infinite_sheds`, containing a model for irradiance +* Added ``map_variables`` option to :py:func:`pvlib.iotools.get_psm3` and + :py:func:`pvlib.iotools.read_psm3` (:pull:`1374`) +* Added ``pvlib.bifacial.infinite_sheds``, containing a model for irradiance on front and back surfaces of bifacial arrays. (:pull:`717`) * Added ``map_variables`` option to :func:`~pvlib.iotools.read_crn` (:pull:`1368`) * Added :py:func:`pvlib.temperature.prilliman` for modeling cell temperature @@ -33,7 +34,7 @@ Bug fixes values were returned when the sun is behind the plane of array (:issue:`1348`, :pull:`1349`) * Fixed bug in :py:func:`pvlib.iotools.get_pvgis_hourly` where the ``optimal_surface_tilt`` argument was not being passed to the ``optimalinclination`` request parameter (:pull:`1356`) -* Fixed bug in :py:func:`pvlib.bifacial.pvfactors_timeseries` where scalar ``surface_tilt`` +* Fixed bug in :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries` where scalar ``surface_tilt`` and ``surface_azimuth`` inputs caused an error (:issue:`1127`, :issue:`1332`, :pull:`1361`) * Added -99999 to list of values to map to nan in :func:`~pvlib.iotools.read_crn` (:issue:`1372`, :pull:`1368`) @@ -54,14 +55,19 @@ Documentation * Fix documentation return error in :py:meth:`pvlib.forecast.ForecastModel.cloud_cover_to_transmittance_linear` (:issue:`1367`, :pull:`1370`) * Add gallery example illustrating bifacial simulation using the - :py:func:`pvlib.bifacial.pvfactors_timeseries` function and either + :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries` function and either :py:class:`pvlib.modelchain.ModelChain` or procedural functions. 
(:pull:`1394`) * Fix some typos (:pull:`1414`) +* Clarify the descriptions of ``delta_t`` in several solar position functions + (:pull:`1429`) +* Changed $C$ to $\\degree C$ in the plot_singlediode.py file (:pull:`1434`) Requirements ~~~~~~~~~~~~ * numpy >= 1.16.0 is now required for all python versions. (:pull:`1400`) +* Fix an installation issue with ``dataclasses`` on python 3.6 + (:issue:`1302`, :pull:`1422`) Contributors @@ -76,3 +82,9 @@ Contributors * Saurabh Aneja (:ghuser:`spaneja`) * Jack Kelly (:ghuser:`JackKelly`) * Somasree Majumder(:ghuser:`soma2000-lang`) +* Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`) +* Will Holmgren (:ghuser:`wholmgren`) +* Mark Mikofski (:ghuser:`mikofski`) +* Will Hobbs (:ghuser:`williamhobbs`) +* Uday Guntupalli (:ghuser:`uguntupalli`) +* Shashwata Roy (:ghuser:`RoyCoding8`) diff --git a/docs/sphinx/source/whatsnew/v0.9.2.rst b/docs/sphinx/source/whatsnew/v0.9.2.rst new file mode 100644 index 0000000000..05b7ed58ab --- /dev/null +++ b/docs/sphinx/source/whatsnew/v0.9.2.rst @@ -0,0 +1,42 @@ +.. _whatsnew_0920: + +v0.9.2 (TBD) +----------------------- + +Deprecations +~~~~~~~~~~~~ + +Enhancements +~~~~~~~~~~~~ + +Bug fixes +~~~~~~~~~ +* :py:func:`pvlib.irradiance.get_total_irradiance` and + :py:func:`pvlib.solarposition.spa_python` now raise an error instead + of silently ignoring unknown parameters (:pull:`1437`) +* Fix a bug in :py:func:`pvlib.solarposition.sun_rise_set_transit_ephem` + where passing localized timezones with large UTC offsets could return + rise/set/transit times for the wrong day in recent versions of ``ephem`` + (:issue:`1449`, :pull:`1448`) + + +Testing +~~~~~~~ + +Documentation +~~~~~~~~~~~~~ + +Benchmarking +~~~~~~~~~~~~~ +* Updated version of numba in asv.conf from 0.36.1 to 0.40.0 to solve numba/numpy conflict. (:issue:`1439`, :pull:`1440`) +* Added a basic CI asv check (:issue:`1446`, :pull:`1454`) + +Requirements +~~~~~~~~~~~~ +* Minimum pandas version increased to v0.25.0, released July 18, 2019. 
(:pull:`1448`) + +Contributors +~~~~~~~~~~~~ +* Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`) +* Chencheng Luo (:ghuser:`roger-lcc`) +* Prajwal Borkar (:ghuser:`PrajwalBorkar`) diff --git a/pvlib/forecast.py b/pvlib/forecast.py index 634ae75ab2..ce80e0ad74 100644 --- a/pvlib/forecast.py +++ b/pvlib/forecast.py @@ -15,6 +15,7 @@ from siphon.ncss import NCSS import warnings +from pvlib._deprecation import deprecated warnings.warn( @@ -22,7 +23,15 @@ 'The API may change, the functionality may be consolidated into an io ' 'module, or the module may be separated into its own package.') +_forecast_deprecated = deprecated( + since='0.9.1', + removal='a future release', + addendum='For details, see https://pvlib-python.readthedocs.io/en/stable/user_guide/forecasts.html' # noqa: E501 +) +# don't decorate the base class to prevent the subclasses from showing +# duplicate warnings: +# @_forecast_deprecated class ForecastModel: """ An object for querying and holding forecast model information for @@ -684,6 +693,7 @@ def gust_to_speed(self, data, scaling=1/1.4): return wind_speed +@_forecast_deprecated class GFS(ForecastModel): """ Subclass of the ForecastModel class representing GFS @@ -785,6 +795,7 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] +@_forecast_deprecated class HRRR_ESRL(ForecastModel): # noqa: N801 """ Subclass of the ForecastModel class representing @@ -875,6 +886,7 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] +@_forecast_deprecated class NAM(ForecastModel): """ Subclass of the ForecastModel class representing NAM @@ -956,6 +968,7 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] +@_forecast_deprecated class HRRR(ForecastModel): """ Subclass of the ForecastModel class representing HRRR @@ -1044,6 +1057,7 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return 
data[self.output_variables] +@_forecast_deprecated class NDFD(ForecastModel): """ Subclass of the ForecastModel class representing NDFD forecast @@ -1112,6 +1126,7 @@ def process_data(self, data, **kwargs): return data[self.output_variables] +@_forecast_deprecated class RAP(ForecastModel): """ Subclass of the ForecastModel class representing RAP forecast model. diff --git a/pvlib/iotools/crn.py b/pvlib/iotools/crn.py index fe46bb69d2..90e1d6d6b4 100644 --- a/pvlib/iotools/crn.py +++ b/pvlib/iotools/crn.py @@ -2,6 +2,7 @@ """ import pandas as pd +import numpy as np HEADERS = [ @@ -107,13 +108,24 @@ def read_crn(filename, map_variables=True): """ # read in data + # TODO: instead of parsing as strings and then post-processing, switch to + # pd.read_fwf(..., dtype=dict(zip(HEADERS, DTYPES)), skip_blank_lines=True) + # when our minimum pandas >= 1.2.0 (skip_blank_lines bug for <1.2.0). + # As a workaround, parse all values as strings, then drop NaN, then cast + # to the appropriate dtypes, and mask "sentinel" NaN (e.g. 
-9999.0) data = pd.read_fwf(filename, header=None, names=HEADERS, widths=WIDTHS, - na_values=NAN_DICT) - # Remove rows with all nans + dtype=str) + + # drop empty (bad) lines data = data.dropna(axis=0, how='all') - # set dtypes here because dtype kwarg not supported in read_fwf until 0.20 + + # can't set dtypes in read_fwf because int cols can't contain NaN, so + # do it here instead data = data.astype(dict(zip(HEADERS, DTYPES))) + # finally, replace -999 values with NaN + data = data.replace(NAN_DICT, value=np.nan) + # set index # UTC_TIME does not have leading 0s, so must zfill(4) to comply # with %H%M format diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 10ff919c56..a8f9781c22 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -1,4 +1,3 @@ - """ Get PSM3 TMY see https://developer.nrel.gov/docs/solar/nsrdb/psm3_data_download/ @@ -8,6 +7,8 @@ import requests import pandas as pd from json import JSONDecodeError +import warnings +from pvlib._deprecation import pvlibDeprecationWarning NSRDB_API_BASE = "https://developer.nrel.gov" PSM_URL = NSRDB_API_BASE + "/api/nsrdb/v2/solar/psm3-download.csv" @@ -20,12 +21,31 @@ 'surface_pressure', 'wind_direction', 'wind_speed') PVLIB_PYTHON = 'pvlib python' +# Dictionary mapping PSM3 names to pvlib names +VARIABLE_MAP = { + 'GHI': 'ghi', + 'DHI': 'dhi', + 'DNI': 'dni', + 'Clearsky GHI': 'ghi_clear', + 'Clearsky DHI': 'dhi_clear', + 'Clearsky DNI': 'dni_clear', + 'Solar Zenith Angle': 'solar_zenith', + 'Temperature': 'temp_air', + 'Relative Humidity': 'relative_humidity', + 'Dew point': 'temp_dew', + 'Pressure': 'pressure', + 'Wind Direction': 'wind_direction', + 'Wind Speed': 'wind_speed', + 'Surface Albedo': 'albedo', + 'Precipitable Water': 'precipitable_water', +} + def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, attributes=ATTRIBUTES, leap_day=False, full_name=PVLIB_PYTHON, - affiliation=PVLIB_PYTHON, timeout=30): + affiliation=PVLIB_PYTHON, map_variables=None, 
timeout=30): """ - Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB + Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB is described in [1]_ and the PSM3 API is described in [2]_, [3]_, and [4]_. .. versionchanged:: 0.9.0 @@ -48,19 +68,23 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, PSM3 API parameter specifing year or TMY variant to download, see notes below for options interval : int, {60, 5, 15, 30} - interval size in minutes, must be 5, 15, 30 or 60. Only used for + interval size in minutes, must be 5, 15, 30 or 60. Only used for single-year requests (i.e., it is ignored for tmy/tgy/tdy requests). attributes : list of str, optional meteorological fields to fetch. If not specified, defaults to ``pvlib.iotools.psm3.ATTRIBUTES``. See references [2]_, [3]_, and [4]_ - for lists of available fields. + for lists of available fields. Alternatively, pvlib names may also be + used (e.g. 'ghi' rather than 'GHI'); see :const:`VARIABLE_MAP`. leap_day : boolean, default False - include leap day in the results. Only used for single-year requests + include leap day in the results. Only used for single-year requests (i.e., it is ignored for tmy/tgy/tdy requests). full_name : str, default 'pvlib python' optional affiliation : str, default 'pvlib python' optional + map_variables: boolean, optional + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable :const:`VARIABLE_MAP`. 
timeout : int, default 30 time in seconds to wait for server response before timeout @@ -96,14 +120,15 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, +===========+=============================================================+ | Year | 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, | | | 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, | - | | 2018, 2019 | + | | 2018, 2019, 2020 | +-----------+-------------------------------------------------------------+ | TMY | tmy, tmy-2016, tmy-2017, tdy-2017, tgy-2017, | | | tmy-2018, tdy-2018, tgy-2018, tmy-2019, tdy-2019, tgy-2019 | + | | tmy-2020, tdy-2020, tgy-2020 | +-----------+-------------------------------------------------------------+ .. warning:: PSM3 is limited to data found in the NSRDB, please consult the - references below for locations with available data. Additionally, + references below for locations with available data. Additionally, querying data with < 30-minute resolution uses a different API endpoint with fewer available fields (see [4]_). 
@@ -133,6 +158,13 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, # convert to string to accomodate integer years being passed in names = str(names) + # convert pvlib names in attributes to psm3 convention (reverse mapping) + # unlike psm3 columns, attributes are lower case and with underscores + amap = {value: key.lower().replace(' ', '_') for (key, value) in + VARIABLE_MAP.items()} + attributes = [amap.get(a, a) for a in attributes] + attributes = list(set(attributes)) # remove duplicate values + # required query-string parameters for request to PSM3 API params = { 'api_key': api_key, @@ -167,12 +199,12 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, # the CSV is in the response content as a UTF-8 bytestring # to use pandas we need to create a file buffer from the response fbuf = io.StringIO(response.content.decode('utf-8')) - return parse_psm3(fbuf) + return parse_psm3(fbuf, map_variables) -def parse_psm3(fbuf): +def parse_psm3(fbuf, map_variables=None): """ - Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB + Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. .. versionchanged:: 0.9.0 @@ -184,6 +216,9 @@ def parse_psm3(fbuf): ---------- fbuf: file-like object File-like object containing data to read. + map_variables: bool + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable VARIABLE_MAP. Returns ------- @@ -296,12 +331,25 @@ def parse_psm3(fbuf): tz = 'Etc/GMT%+d' % -metadata['Time Zone'] data.index = pd.DatetimeIndex(dtidx).tz_localize(tz) + if map_variables is None: + warnings.warn( + 'PSM3 variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.11.0. 
Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning) + map_variables = False + if map_variables: + data = data.rename(columns=VARIABLE_MAP) + metadata['latitude'] = metadata.pop('Latitude') + metadata['longitude'] = metadata.pop('Longitude') + metadata['altitude'] = metadata.pop('Elevation') + return data, metadata -def read_psm3(filename): +def read_psm3(filename, map_variables=None): """ - Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB + Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. .. versionchanged:: 0.9.0 @@ -313,6 +361,9 @@ def read_psm3(filename): ---------- filename: str Filename of a file containing data to read. + map_variables: bool + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable VARIABLE_MAP. Returns ------- @@ -334,5 +385,5 @@ def read_psm3(filename): `_ """ with open(str(filename), 'r') as fbuf: - content = parse_psm3(fbuf) + content = parse_psm3(fbuf, map_variables) return content diff --git a/pvlib/iotools/sodapro.py b/pvlib/iotools/sodapro.py index abb06da8bb..a5d3e1efd1 100644 --- a/pvlib/iotools/sodapro.py +++ b/pvlib/iotools/sodapro.py @@ -153,11 +153,11 @@ def get_cams(latitude, longitude, start, end, email, identifier='mcclear', References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ .. 
[3] `CAMS McClear Automatic Access - `_ + `_ """ try: time_step_str = TIME_STEPS_MAP[time_step] @@ -185,7 +185,7 @@ def get_cams(latitude, longitude, start, end, email, identifier='mcclear', email = email.replace('@', '%2540') # Format email address identifier = 'get_{}'.format(identifier.lower()) # Format identifier str - base_url = f"http://{server}/service/wps" + base_url = f"https://{server}/service/wps" data_inputs_dict = { 'latitude': latitude, @@ -263,9 +263,9 @@ def parse_cams(fbuf, integrated=False, label=None, map_variables=True): References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ """ metadata = {} # Initial lines starting with # contain metadata @@ -366,9 +366,9 @@ def read_cams(filename, integrated=False, label=None, map_variables=True): References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ """ with open(str(filename), 'r') as fbuf: content = parse_cams(fbuf, integrated, label, map_variables) diff --git a/pvlib/irradiance.py b/pvlib/irradiance.py index c5c3b1c095..3de4d96f65 100644 --- a/pvlib/irradiance.py +++ b/pvlib/irradiance.py @@ -306,7 +306,7 @@ def get_total_irradiance(surface_tilt, surface_azimuth, dni, ghi, dhi, dni_extra=None, airmass=None, albedo=.25, surface_type=None, model='isotropic', - model_perez='allsitescomposite1990', **kwargs): + model_perez='allsitescomposite1990'): r""" Determine total in-plane irradiance and its beam, sky diffuse and ground reflected components, using the specified sky diffuse irradiance model. 
diff --git a/pvlib/location.py b/pvlib/location.py index 104fccac89..ddd2d1b96a 100644 --- a/pvlib/location.py +++ b/pvlib/location.py @@ -231,8 +231,7 @@ def get_clearsky(self, times, model='ineichen', solar_position=None, pressure = atmosphere.alt2pres(self.altitude) if solar_position is None: - solar_position = self.get_solarposition(times, pressure=pressure, - **kwargs) + solar_position = self.get_solarposition(times, pressure=pressure) apparent_zenith = solar_position['apparent_zenith'] apparent_elevation = solar_position['apparent_elevation'] diff --git a/pvlib/solarposition.py b/pvlib/solarposition.py index 4047187533..cdcacd7ec6 100644 --- a/pvlib/solarposition.py +++ b/pvlib/solarposition.py @@ -22,6 +22,7 @@ import pandas as pd import scipy.optimize as so import warnings +import datetime from pvlib import atmosphere from pvlib.tools import datetime_to_djd, djd_to_datetime @@ -275,7 +276,7 @@ def _spa_python_import(how): def spa_python(time, latitude, longitude, altitude=0, pressure=101325, temperature=12, delta_t=67.0, - atmos_refract=None, how='numpy', numthreads=4, **kwargs): + atmos_refract=None, how='numpy', numthreads=4): """ Calculate the solar position using a python implementation of the NREL SPA algorithm. @@ -304,13 +305,13 @@ def spa_python(time, latitude, longitude, temperature : int or float, optional, default 12 avg. yearly air temperature in degrees C. delta_t : float, optional, default 67.0 + Difference between terrestrial time and UT1. If delta_t is None, uses spa.calculate_deltat using time.year and time.month from pandas.DatetimeIndex. - For most simulations specifing delta_t is sufficient. - Difference between terrestrial time and UT1. + For most simulations the default delta_t is sufficient. *Note: delta_t = None will break code using nrel_numba, this will be fixed in a future version.* - The USNO has historical and forecasted delta_t [3]. + The USNO has historical and forecasted delta_t [3]_. 
atmos_refrac : None or float, optional, default None The approximate atmospheric refraction (in degrees) at sunrise and sunset. @@ -405,18 +406,17 @@ def sun_rise_set_transit_spa(times, latitude, longitude, how='numpy', Latitude in degrees, positive north of equator, negative to south longitude : float Longitude in degrees, positive east of prime meridian, negative to west - delta_t : float, optional - If delta_t is None, uses spa.calculate_deltat - using times.year and times.month from pandas.DatetimeIndex. - For most simulations specifing delta_t is sufficient. - Difference between terrestrial time and UT1. - delta_t = None will break code using nrel_numba, - this will be fixed in a future version. - By default, use USNO historical data and predictions how : str, optional, default 'numpy' Options are 'numpy' or 'numba'. If numba >= 0.17.0 is installed, how='numba' will compile the spa functions to machine code and run them multithreaded. + delta_t : float, optional, default 67.0 + Difference between terrestrial time and UT1. + If delta_t is None, uses spa.calculate_deltat + using times.year and times.month from pandas.DatetimeIndex. + For most simulations the default delta_t is sufficient. + *Note: delta_t = None will break code using nrel_numba, + this will be fixed in a future version.* numthreads : int, optional, default 4 Number of threads to use if how == 'numba'. @@ -575,9 +575,10 @@ def sun_rise_set_transit_ephem(times, latitude, longitude, trans = [] for thetime in times: thetime = thetime.to_pydatetime() - # pyephem drops timezone when converting to its internal datetime - # format, so handle timezone explicitly here - obs.date = ephem.Date(thetime - thetime.utcoffset()) + # older versions of pyephem ignore timezone when converting to its + # internal datetime format, so convert to UTC here to support + # all versions. 
GH #1449 + obs.date = ephem.Date(thetime.astimezone(datetime.timezone.utc)) sunrise.append(_ephem_to_timezone(rising(sun), tzinfo)) sunset.append(_ephem_to_timezone(setting(sun), tzinfo)) trans.append(_ephem_to_timezone(transit(sun), tzinfo)) @@ -972,13 +973,12 @@ def nrel_earthsun_distance(time, how='numpy', delta_t=67.0, numthreads=4): to machine code and run them multithreaded. delta_t : float, optional, default 67.0 + Difference between terrestrial time and UT1. If delta_t is None, uses spa.calculate_deltat using time.year and time.month from pandas.DatetimeIndex. - For most simulations specifing delta_t is sufficient. - Difference between terrestrial time and UT1. + For most simulations the default delta_t is sufficient. *Note: delta_t = None will break code using nrel_numba, this will be fixed in a future version.* - By default, use USNO historical data and predictions numthreads : int, optional, default 4 Number of threads to use if how == 'numba'. diff --git a/pvlib/spa.py b/pvlib/spa.py index 743df5ce8e..348c30eea9 100644 --- a/pvlib/spa.py +++ b/pvlib/spa.py @@ -1097,15 +1097,9 @@ def solar_position(unixtime, lat, lon, elev, pressure, temp, delta_t, temp : int or float avg. yearly temperature at location in degrees C; used for atmospheric correction - delta_t : float, optional - If delta_t is None, uses spa.calculate_deltat - using time.year and time.month from pandas.DatetimeIndex. - For most simulations specifing delta_t is sufficient. + delta_t : float Difference between terrestrial time and UT1. - *Note: delta_t = None will break code using nrel_numba, - this will be fixed in a future version. - By default, use USNO historical data and predictions - atmos_refrac : float, optional + atmos_refrac : float The approximate atmospheric refraction (in degrees) at sunrise and sunset. 
numthreads: int, optional, default 8 diff --git a/pvlib/tests/iotools/test_crn.py b/pvlib/tests/iotools/test_crn.py index b19888dda1..8d880e0432 100644 --- a/pvlib/tests/iotools/test_crn.py +++ b/pvlib/tests/iotools/test_crn.py @@ -83,7 +83,7 @@ def test_read_crn_problems(testfile_problems, columns_mapped, dtypes): '2020-07-06 13:10:00'], freq=None).tz_localize('UTC') values = np.array([ - [92821, 20200706, 1200, 20200706, 700, '3.0', -80.69, 28.62, 24.9, + [92821, 20200706, 1200, 20200706, 700, '3', -80.69, 28.62, 24.9, 0.0, np.nan, 0, 25.5, 'C', 0, 93.0, 0, nan, nan, 990, 0, 1.57, 0], [92821, 20200706, 1310, 20200706, 810, '2.623', -80.69, 28.62, 26.9, 0.0, 430.0, 0, 30.2, 'C', 0, 87.0, 0, nan, nan, 989, 0, diff --git a/pvlib/tests/iotools/test_psm3.py b/pvlib/tests/iotools/test_psm3.py index 92451a23f1..d151cfa6da 100644 --- a/pvlib/tests/iotools/test_psm3.py +++ b/pvlib/tests/iotools/test_psm3.py @@ -4,13 +4,14 @@ import os from pvlib.iotools import psm3 -from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY +from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY, assert_index_equal import numpy as np import pandas as pd import pytest from requests import HTTPError from io import StringIO import warnings +from pvlib._deprecation import pvlibDeprecationWarning TMY_TEST_DATA = DATA_DIR / 'test_psm3_tmy-2017.csv' YEAR_TEST_DATA = DATA_DIR / 'test_psm3_2017.csv' @@ -76,7 +77,8 @@ def assert_psm3_equal(data, metadata, expected): def test_get_psm3_tmy(nrel_api_key): """test get_psm3 with a TMY""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='tmy-2017') + PVLIB_EMAIL, names='tmy-2017', + map_variables=False) expected = pd.read_csv(TMY_TEST_DATA) assert_psm3_equal(data, metadata, expected) @@ -86,7 +88,8 @@ def test_get_psm3_tmy(nrel_api_key): def test_get_psm3_singleyear(nrel_api_key): """test get_psm3 with a single year""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2017', 
interval=30) + PVLIB_EMAIL, names='2017', + map_variables=False, interval=30) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) @@ -96,7 +99,8 @@ def test_get_psm3_singleyear(nrel_api_key): def test_get_psm3_5min(nrel_api_key): """test get_psm3 for 5-minute data""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2019', interval=5) + PVLIB_EMAIL, names='2019', interval=5, + map_variables=False) assert len(data) == 525600/5 first_day = data.loc['2019-01-01'] expected = pd.read_csv(YEAR_TEST_DATA_5MIN) @@ -108,7 +112,7 @@ def test_get_psm3_5min(nrel_api_key): def test_get_psm3_check_leap_day(nrel_api_key): data_2012, _ = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, names="2012", interval=60, - leap_day=True) + leap_day=True, map_variables=False) assert len(data_2012) == (8760 + 24) @@ -133,7 +137,7 @@ def test_get_psm3_tmy_errors( """ with pytest.raises(HTTPError) as excinfo: psm3.get_psm3(latitude, longitude, api_key, PVLIB_EMAIL, - names=names, interval=interval) + names=names, interval=interval, map_variables=False) # ensure the HTTPError caught isn't due to overuse of the API key assert "OVER_RATE_LIMIT" not in str(excinfo.value) @@ -149,13 +153,49 @@ def io_input(request): def test_parse_psm3(io_input): """test parse_psm3""" - data, metadata = psm3.parse_psm3(io_input) + data, metadata = psm3.parse_psm3(io_input, map_variables=False) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) def test_read_psm3(): """test read_psm3""" - data, metadata = psm3.read_psm3(MANUAL_TEST_DATA) + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=False) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) + + +def test_read_psm3_map_variables(): + """test read_psm3 map_variables=True""" + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=True) + columns_mapped = ['Year', 'Month', 'Day', 'Hour', 'Minute', 
'dhi', 'dni', + 'ghi', 'dhi_clear', 'dni_clear', 'ghi_clear', + 'Cloud Type', 'Dew Point', 'solar_zenith', + 'Fill Flag', 'albedo', 'wind_speed', + 'precipitable_water', 'wind_direction', + 'relative_humidity', 'temp_air', 'pressure'] + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=True) + assert_index_equal(data.columns, pd.Index(columns_mapped)) + + +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_get_psm3_attribute_mapping(nrel_api_key): + """Test that pvlib names can be passed in as attributes and get correctly + reverse mapped to PSM3 names""" + data, meta = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, + names=2019, interval=60, + attributes=['ghi', 'wind_speed'], + map_variables=True) + assert 'ghi' in data.columns + assert 'wind_speed' in data.columns + assert 'latitude' in meta.keys() + assert 'longitude' in meta.keys() + assert 'altitude' in meta.keys() + + +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_psm3_variable_map_deprecation_warning(nrel_api_key): + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + _ = psm3.read_psm3(MANUAL_TEST_DATA) diff --git a/pvlib/tests/iotools/test_sodapro.py b/pvlib/tests/iotools/test_sodapro.py index 10f9a1e8c9..24e5ebbfcf 100644 --- a/pvlib/tests/iotools/test_sodapro.py +++ b/pvlib/tests/iotools/test_sodapro.py @@ -209,7 +209,7 @@ def test_get_cams(requests_mock, testfile, index, columns, values, dtypes, mock_response = test_file.read() # Specify the full URI of a specific example, this ensures that all of the # inputs are passing on correctly - url_test_cams = 
f'http://www.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=80;date_begin=2020-01-01;date_end=2020-05-04;time_ref=UT;summarization=P01M;username=pvlib-admin%2540googlegroups.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_{identifier}&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 + url_test_cams = f'https://www.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=80;date_begin=2020-01-01;date_end=2020-05-04;time_ref=UT;summarization=P01M;username=pvlib-admin%2540googlegroups.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_{identifier}&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 requests_mock.get(url_test_cams, text=mock_response, headers={'Content-Type': 'application/csv'}) @@ -254,7 +254,7 @@ def test_get_cams_bad_request(requests_mock): Please, register yourself at www.soda-pro.com """ - url_cams_bad_request = 'http://pro.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=-999;date_begin=2020-01-01;date_end=2020-05-04;time_ref=TST;summarization=PT01H;username=test%2540test.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 + url_cams_bad_request = 'https://pro.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=-999;date_begin=2020-01-01;date_end=2020-05-04;time_ref=TST;summarization=PT01H;username=test%2540test.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 requests_mock.get(url_cams_bad_request, text=mock_response_bad, headers={'Content-Type': 'application/xml'}) diff --git a/pvlib/tests/test_conftest.py b/pvlib/tests/test_conftest.py index a42d28d463..e848ed19c1 100644 --- a/pvlib/tests/test_conftest.py +++ b/pvlib/tests/test_conftest.py @@ -52,22 +52,21 @@ def test_use_fixture_with_decorator(some_data): 'assert_frame_equal']) 
@pytest.mark.parametrize('pd_version', ['1.0.0', '1.1.0']) @pytest.mark.parametrize('check_less_precise', [True, False]) -def test__check_pandas_assert_kwargs(mocker, monkeypatch, - function_name, pd_version, +def test__check_pandas_assert_kwargs(mocker, function_name, pd_version, check_less_precise): # test that conftest._check_pandas_assert_kwargs returns appropriate # kwargs for the assert_x_equal functions - # patch the pandas assert; not interested in actually calling them: - def patched_assert(*args, **kwargs): - pass + # NOTE: be careful about mixing mocker.patch and pytest.MonkeyPatch! + # they do not coordinate their cleanups, so it is safest to only + # use one or the other. GH #1447 - monkeypatch.setattr(pandas.testing, function_name, patched_assert) - # then attach a spy to it so we can see what args it is called with: - mocked_function = mocker.spy(pandas.testing, function_name) + # patch the pandas assert; not interested in actually calling them, + # plus we want to spy on how they get called. + spy = mocker.patch('pandas.testing.' 
+ function_name) # patch pd.__version__ to exercise the two branches in # conftest._check_pandas_assert_kwargs - monkeypatch.setattr(pandas, '__version__', pd_version) + mocker.patch('pandas.__version__', new=pd_version) # finally, run the function and check what args got passed to pandas: assert_function = getattr(conftest, function_name) @@ -79,4 +78,4 @@ def patched_assert(*args, **kwargs): else: expected_kwargs = {'check_less_precise': check_less_precise} - mocked_function.assert_called_with(*args, **expected_kwargs) + spy.assert_called_once_with(*args, **expected_kwargs) diff --git a/pvlib/tests/test_forecast.py b/pvlib/tests/test_forecast.py index db2ed75154..4382666317 100644 --- a/pvlib/tests/test_forecast.py +++ b/pvlib/tests/test_forecast.py @@ -14,6 +14,8 @@ ) from .conftest import RERUNS, RERUNS_DELAY +from pvlib._deprecation import pvlibDeprecationWarning + pytestmark = pytest.mark.skipif(not has_siphon, reason='requires siphon') @@ -52,7 +54,8 @@ @requires_siphon @pytest.fixture(scope='module', params=_modelclasses) def model(request): - amodel = request.param() + with pytest.warns(pvlibDeprecationWarning): + amodel = request.param() try: raw_data = amodel.get_data(_latitude, _longitude, _start, _end) except Exception as e: @@ -90,7 +93,8 @@ def test_process_data(model): def test_bad_kwarg_get_data(): # For more information on why you would want to pass an unknown keyword # argument, see Github issue #745. - amodel = NAM() + with pytest.warns(pvlibDeprecationWarning): + amodel = NAM() data = amodel.get_data(_latitude, _longitude, _start, _end, bad_kwarg=False) assert not data.empty @@ -103,7 +107,8 @@ def test_bad_kwarg_get_data(): def test_bad_kwarg_get_processed_data(): # For more information on why you would want to pass an unknown keyword # argument, see Github issue #745. 
- amodel = NAM() + with pytest.warns(pvlibDeprecationWarning): + amodel = NAM() data = amodel.get_processed_data(_latitude, _longitude, _start, _end, bad_kwarg=False) assert not data.empty @@ -114,7 +119,8 @@ def test_bad_kwarg_get_processed_data(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_how_kwarg_get_processed_data(): - amodel = NAM() + with pytest.warns(pvlibDeprecationWarning): + amodel = NAM() data = amodel.get_processed_data(_latitude, _longitude, _start, _end, how='clearsky_scaling') assert not data.empty @@ -125,7 +131,8 @@ def test_how_kwarg_get_processed_data(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_vert_level(): - amodel = NAM() + with pytest.warns(pvlibDeprecationWarning): + amodel = NAM() vert_level = 5000 amodel.get_processed_data(_latitude, _longitude, _start, _end, vert_level=vert_level) @@ -136,7 +143,8 @@ def test_vert_level(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_datetime(): - amodel = NAM() + with pytest.warns(pvlibDeprecationWarning): + amodel = NAM() start = datetime.now(tz=timezone.utc) end = start + timedelta(days=1) amodel.get_processed_data(_latitude, _longitude, start, end) @@ -147,7 +155,8 @@ def test_datetime(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_queryvariables(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() new_variables = ['u-component_of_wind_height_above_ground'] data = amodel.get_data(_latitude, _longitude, _start, _end, query_variables=new_variables) @@ -156,16 +165,19 @@ def test_queryvariables(): @requires_siphon def test_latest(): - GFS(set_type='latest') + with pytest.warns(pvlibDeprecationWarning): + GFS(set_type='latest') @requires_siphon def test_full(): - GFS(set_type='full') + with pytest.warns(pvlibDeprecationWarning): + GFS(set_type='full') def 
test_temp_convert(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() data = pd.DataFrame({'temp_air': [273.15]}) data['temp_air'] = amodel.kelvin_to_celsius(data['temp_air']) @@ -183,27 +195,31 @@ def test_temp_convert(): def test_set_location(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() latitude, longitude = 32.2, -110.9 time = 'UTC' amodel.set_location(time, latitude, longitude) def test_set_query_time_range_tzfail(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() with pytest.raises(TypeError): amodel.set_query_time_range(datetime.now(), datetime.now()) def test_cloud_cover_to_transmittance_linear(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() assert_allclose(amodel.cloud_cover_to_transmittance_linear(0), 0.75) assert_allclose(amodel.cloud_cover_to_transmittance_linear(100), 0.0) assert_allclose(amodel.cloud_cover_to_transmittance_linear(0, 0.5), 0.5) def test_cloud_cover_to_ghi_linear(): - amodel = GFS() + with pytest.warns(pvlibDeprecationWarning): + amodel = GFS() ghi_clear = 1000 offset = 25 out = amodel.cloud_cover_to_ghi_linear(0, ghi_clear, offset=offset) diff --git a/pvlib/tracking.py b/pvlib/tracking.py index 732108dec2..951f2e886e 100644 --- a/pvlib/tracking.py +++ b/pvlib/tracking.py @@ -510,6 +510,9 @@ def singleaxis(apparent_zenith, apparent_azimuth, # Calculate surface_tilt dotproduct = (panel_norm_earth * projected_normal).sum(axis=1) + # for edge cases like axis_tilt=90, numpy's SIMD can produce values like + # dotproduct = (1 + 2e-16). Clip off the excess so that arccos works: + dotproduct = np.clip(dotproduct, -1, 1) surface_tilt = 90 - np.degrees(np.arccos(dotproduct)) # Bundle DataFrame for return values and filter for sun below horizon. 
diff --git a/setup.py b/setup.py index 7301f5eab6..96b4737515 100755 --- a/setup.py +++ b/setup.py @@ -39,15 +39,12 @@ URL = 'https://github.com/pvlib/pvlib-python' INSTALL_REQUIRES = ['numpy >= 1.16.0', - 'pandas >= 0.22.0', + 'pandas >= 0.25.0', 'pytz', 'requests', 'scipy >= 1.2.0', - 'h5py'] - -# include dataclasses as a dependency only on python 3.6 -if sys.version_info.major == 3 and sys.version_info.minor == 6: - INSTALL_REQUIRES.append('dataclasses') + 'h5py', + 'dataclasses; python_version < "3.7"'] TESTS_REQUIRE = ['nose', 'pytest', 'pytest-cov', 'pytest-mock', 'requests-mock', 'pytest-timeout', 'pytest-rerunfailures', @@ -56,8 +53,8 @@ 'optional': ['cython', 'ephem', 'netcdf4', 'nrel-pysam', 'numba', 'pvfactors', 'siphon', 'statsmodels', 'cftime >= 1.1.1'], - 'doc': ['ipython', 'matplotlib', 'sphinx == 3.1.2', - 'pydata-sphinx-theme == 0.8.0', 'sphinx-gallery', + 'doc': ['ipython', 'matplotlib', 'sphinx == 4.5.0', + 'pydata-sphinx-theme == 0.8.1', 'sphinx-gallery', 'docutils == 0.15.2', 'pillow', 'netcdf4', 'siphon', 'sphinx-toggleprompt >= 0.0.5', 'pvfactors'], 'test': TESTS_REQUIRE From 3d63ac3d2404daac3afbee7adb3122a3f950fec1 Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Thu, 19 May 2022 15:32:25 +0200 Subject: [PATCH 6/8] Revert "Squashed commit of the following:" This reverts commit b313c64d03b834186b4af8c9a890d01ec6440e95. 
--- .github/workflows/asv_check.yml | 34 -------- README.md | 18 ++--- benchmarks/asv.conf.json | 4 +- ci/requirements-py36-min.yml | 2 +- ci/requirements-py36.yml | 2 +- ci/requirements-py37.yml | 2 +- ci/requirements-py38.yml | 2 +- ci/requirements-py39.yml | 2 +- docs/examples/iv-modeling/plot_singlediode.py | 2 +- docs/sphinx/source/conf.py | 7 +- docs/sphinx/source/reference/forecasting.rst | 6 -- docs/sphinx/source/user_guide/bifacial.rst | 4 +- docs/sphinx/source/user_guide/forecasts.rst | 19 ----- docs/sphinx/source/whatsnew/v0.9.1.rst | 30 +++---- docs/sphinx/source/whatsnew/v0.9.2.rst | 42 ---------- pvlib/forecast.py | 15 ---- pvlib/iotools/crn.py | 18 +---- pvlib/iotools/psm3.py | 79 ++++--------------- pvlib/iotools/sodapro.py | 16 ++-- pvlib/irradiance.py | 2 +- pvlib/location.py | 3 +- pvlib/solarposition.py | 36 ++++----- pvlib/spa.py | 10 ++- pvlib/tests/iotools/test_crn.py | 2 +- pvlib/tests/iotools/test_psm3.py | 56 ++----------- pvlib/tests/iotools/test_sodapro.py | 4 +- pvlib/tests/test_conftest.py | 19 ++--- pvlib/tests/test_forecast.py | 44 ++++------- pvlib/tracking.py | 3 - setup.py | 13 +-- 30 files changed, 124 insertions(+), 372 deletions(-) delete mode 100644 .github/workflows/asv_check.yml delete mode 100644 docs/sphinx/source/whatsnew/v0.9.2.rst diff --git a/.github/workflows/asv_check.yml b/.github/workflows/asv_check.yml deleted file mode 100644 index db0f11f4da..0000000000 --- a/.github/workflows/asv_check.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: asv - -# CI ASV CHECK is aimed to verify that the benchmarks execute without error. 
-on: [pull_request, push] - -jobs: - quick: - runs-on: ubuntu-latest - defaults: - run: - shell: bash -el {0} - - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Install Python - uses: actions/setup-python@v3 - with: - python-version: '3.9.7' - - - name: Install asv - run: pip install asv==0.4.2 - - - name: Run asv benchmarks - run: | - cd benchmarks - asv machine --yes - asv run HEAD^! --quick --dry-run --show-stderr | sed "/failed$/ s/^/##[error]/" | tee benchmarks.log - if grep "failed" benchmarks.log > /dev/null ; then - exit 1 - fi - diff --git a/README.md b/README.md index 211eb7ff95..2911388d29 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Code Quality - +   lgtm quality grade @@ -46,7 +46,7 @@ Coverage - +   coveralls coverage @@ -55,19 +55,11 @@ - - Benchmarks - - - - - - Publications - - zenodo reference + + zenodo reference JOSS reference @@ -108,7 +100,7 @@ Installation ============ pvlib-python releases may be installed using the ``pip`` and ``conda`` tools. -Please see the [Installation page](https://pvlib-python.readthedocs.io/en/stable/user_guide/installation.html) of the documentation for complete instructions. +Please see the [Installation page](http://pvlib-python.readthedocs.io/en/stable/installation.html) of the documentation for complete instructions. 
Contributing diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 79773e928f..c7da42acf2 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -116,12 +116,12 @@ { "python": "3.6", "numpy": "1.16.0", - "pandas": "0.25.0", + "pandas": "0.22.0", "scipy": "1.2.0", // Note: these don't have a minimum in setup.py "h5py": "2.10.0", "ephem": "3.7.6.0", - "numba": "0.40.0", + "numba": "0.36.1", }, // latest versions available { diff --git a/ci/requirements-py36-min.yml b/ci/requirements-py36-min.yml index dfff0a9f97..495099cf8f 100644 --- a/ci/requirements-py36-min.yml +++ b/ci/requirements-py36-min.yml @@ -16,7 +16,7 @@ dependencies: - dataclasses - h5py==3.1.0 - numpy==1.16.0 - - pandas==0.25.0 + - pandas==0.22.0 - scipy==1.2.0 - pytest-rerunfailures # conda version is >3.6 - pytest-remotedata # conda package is 0.3.0, needs > 0.3.1 diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index 596a2e0bbd..295bedda7c 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.25.0 + - pandas >= 0.22.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 8a971d728a..241cafe128 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.25.0 + - pandas >= 0.22.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index db763c3d73..6ff5d2da0f 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -11,7 +11,7 @@ dependencies: - nose - numba - numpy >= 1.16.0 - - pandas >= 0.25.0 + - pandas >= 0.22.0 - pip - pytest - pytest-cov diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index 3621a737b6..32abde067a 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -11,7 +11,7 @@ dependencies: - nose # - numba # python 3.9 
compat in early 2021 - numpy >= 1.16.0 - - pandas >= 0.25.0 + - pandas >= 0.22.0 - pip - pytest - pytest-cov diff --git a/docs/examples/iv-modeling/plot_singlediode.py b/docs/examples/iv-modeling/plot_singlediode.py index e9085b3341..c92d5e999e 100644 --- a/docs/examples/iv-modeling/plot_singlediode.py +++ b/docs/examples/iv-modeling/plot_singlediode.py @@ -103,7 +103,7 @@ for i, case in conditions.iterrows(): label = ( "$G_{eff}$ " + f"{case['Geff']} $W/m^2$\n" - "$T_{cell}$ " + f"{case['Tcell']} $\\degree C$" + "$T_{cell}$ " + f"{case['Tcell']} $C$" ) plt.plot(curve_info['v'][i], curve_info['i'][i], label=label) v_mp = curve_info['v_mp'][i] diff --git a/docs/sphinx/source/conf.py b/docs/sphinx/source/conf.py index fb5228c332..9826781d6d 100644 --- a/docs/sphinx/source/conf.py +++ b/docs/sphinx/source/conf.py @@ -20,9 +20,6 @@ # for generating GH links with linenumbers import inspect -# import distutils before calling pd.show_versions() -# https://github.com/pypa/setuptools/issues/3044 -import distutils # noqa: F401 import pandas as pd pd.show_versions() @@ -247,7 +244,7 @@ def setup(app): # In-line links to references as numbers in brackets. app.add_css_file("reference_format.css") # Add a warning banner at the top of the page if viewing the "latest" docs - app.add_js_file("version-alert.js") + app.add_javascript("version-alert.js") # -- Options for LaTeX output --------------------------------------------- @@ -346,6 +343,8 @@ def setup(app): 'matplotlib': ('https://matplotlib.org/stable', None), } +nbsphinx_allow_errors = True + ipython_warning_is_error = False # suppress "WARNING: Footnote [1] is not referenced." messages diff --git a/docs/sphinx/source/reference/forecasting.rst b/docs/sphinx/source/reference/forecasting.rst index 8b1e125c8a..ff4df7ed4d 100644 --- a/docs/sphinx/source/reference/forecasting.rst +++ b/docs/sphinx/source/reference/forecasting.rst @@ -3,12 +3,6 @@ Forecasting =========== -.. 
warning:: - - All functionality in the ``pvlib.forecast`` module is deprecated as of - pvlib v0.9.1. For details, see :ref:`forecasts`. - - Forecast models --------------- diff --git a/docs/sphinx/source/user_guide/bifacial.rst b/docs/sphinx/source/user_guide/bifacial.rst index b0299041ed..ec18d04a5a 100644 --- a/docs/sphinx/source/user_guide/bifacial.rst +++ b/docs/sphinx/source/user_guide/bifacial.rst @@ -24,7 +24,7 @@ irradiance: pvfactors ---------- +========= The `pvfactors `_ package calculates incident irradiance on the front and back surfaces of an array. pvfactors uses @@ -35,7 +35,7 @@ trackers. Infinite Sheds --------------- +============== The "infinite sheds" model [1] is a 2-dimensional model of irradiance on the front and rear surfaces of a PV array. The model assumes that the array diff --git a/docs/sphinx/source/user_guide/forecasts.rst b/docs/sphinx/source/user_guide/forecasts.rst index d61b40387a..a89904eccd 100644 --- a/docs/sphinx/source/user_guide/forecasts.rst +++ b/docs/sphinx/source/user_guide/forecasts.rst @@ -4,25 +4,6 @@ Forecasting *********** -.. warning:: - - The ``pvlib.forecast`` module is deprecated as of version ``0.9.1``. - - Because none of the current pvlib team members are able to continue - maintaining it, the functionality in ``pvlib.forecast`` is deprecated - and will be removed without replacement in a future version. If you - are interested in maintaining this functionality, please let us know. - - You can fetch forecast data yourself using ``siphon`` (see the - docs below this warning) and the code from pvlib v0.9.0 as a reference: - https://github.com/pvlib/pvlib-python/blob/v0.9.0/pvlib/forecast.py - - The `Solar Forecast Arbiter Core - `_ - offers similar (and more robust) forecast processing functionality - and may be a suitable replacement for some users. - - pvlib python provides a set of functions and classes that make it easy to obtain weather forecast data and convert that data into a PV power forecast. 
Users can retrieve standardized weather forecast data relevant diff --git a/docs/sphinx/source/whatsnew/v0.9.1.rst b/docs/sphinx/source/whatsnew/v0.9.1.rst index 2440f215c7..685a7880a4 100644 --- a/docs/sphinx/source/whatsnew/v0.9.1.rst +++ b/docs/sphinx/source/whatsnew/v0.9.1.rst @@ -1,26 +1,25 @@ .. _whatsnew_0910: -v0.9.1 (March 29, 2022) ------------------------ +v0.9.1 (TBD) +-------------------------- + +Breaking changes +~~~~~~~~~~~~~~~~ Deprecations ~~~~~~~~~~~~ * Moved :py:func:`pvlib.bifacial.pvfactors_timeseries` to :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries`. - :py:mod:`pvlib.bifacial` is now a sub-package. (:pull:`717`) + :py:module:`pvlib.bifacial` is now a sub-package. (:pull:`717`) * :py:func:`pvlib.modelchain.basic_chain` is deprecated. See :py:meth:`pvlib.modelchain.ModelChain.with_pvwatts` and :py:meth:`pvlib.modelchain.ModelChain.with_sapm` for alternative simplified :py:class:`~pvlib.modelchain.ModelChain` interfaces, although note that the inputs do not directly translate. (:pull:`1401`) -* All functionality in the ``pvlib.forecast`` module is deprecated. - For details, see :ref:`forecasts`. (:issue:`1057`, :pull:`1426`) Enhancements ~~~~~~~~~~~~ -* Added ``map_variables`` option to :py:func:`pvlib.iotools.get_psm3` and - :py:func:`pvlib.iotools.read_psm3` (:pull:`1374`) -* Added ``pvlib.bifacial.infinite_sheds``, containing a model for irradiance +* Added `pvlib.bifacial.infinite_sheds`, containing a model for irradiance on front and back surfaces of bifacial arrays. 
(:pull:`717`) * Added ``map_variables`` option to :func:`~pvlib.iotools.read_crn` (:pull:`1368`) * Added :py:func:`pvlib.temperature.prilliman` for modeling cell temperature @@ -34,7 +33,7 @@ Bug fixes values were returned when the sun is behind the plane of array (:issue:`1348`, :pull:`1349`) * Fixed bug in :py:func:`pvlib.iotools.get_pvgis_hourly` where the ``optimal_surface_tilt`` argument was not being passed to the ``optimalinclination`` request parameter (:pull:`1356`) -* Fixed bug in :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries` where scalar ``surface_tilt`` +* Fixed bug in :py:func:`pvlib.bifacial.pvfactors_timeseries` where scalar ``surface_tilt`` and ``surface_azimuth`` inputs caused an error (:issue:`1127`, :issue:`1332`, :pull:`1361`) * Added -99999 to list of values to map to nan in :func:`~pvlib.iotools.read_crn` (:issue:`1372`, :pull:`1368`) @@ -55,19 +54,14 @@ Documentation * Fix documentation return error in :py:meth:`pvlib.forecast.ForecastModel.cloud_cover_to_transmittance_linear` (:issue:`1367`, :pull:`1370`) * Add gallery example illustrating bifacial simulation using the - :py:func:`pvlib.bifacial.pvfactors.pvfactors_timeseries` function and either + :py:func:`pvlib.bifacial.pvfactors_timeseries` function and either :py:class:`pvlib.modelchain.ModelChain` or procedural functions. (:pull:`1394`) * Fix some typos (:pull:`1414`) -* Clarify the descriptions of ``delta_t`` in several solar position functions - (:pull:`1429`) -* Changed $C$ to $\\degree C$ in the plot_singlediode.py file (:pull:`1434`) Requirements ~~~~~~~~~~~~ * numpy >= 1.16.0 is now required for all python versions. 
(:pull:`1400`) -* Fix an installation issue with ``dataclasses`` on python 3.6 - (:issue:`1302`, :pull:`1422`) Contributors @@ -82,9 +76,3 @@ Contributors * Saurabh Aneja (:ghuser:`spaneja`) * Jack Kelly (:ghuser:`JackKelly`) * Somasree Majumder(:ghuser:`soma2000-lang`) -* Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`) -* Will Holmgren (:ghuser:`wholmgren`) -* Mark Mikofksi (:ghuser:`mikofski`) -* Will Hobbs (:ghuser:`williamhobbs`) -* Uday Guntupalli (:ghuser:`uguntupalli`) -* Shashwata Roy (:ghuser:`RoyCoding8`) diff --git a/docs/sphinx/source/whatsnew/v0.9.2.rst b/docs/sphinx/source/whatsnew/v0.9.2.rst deleted file mode 100644 index 05b7ed58ab..0000000000 --- a/docs/sphinx/source/whatsnew/v0.9.2.rst +++ /dev/null @@ -1,42 +0,0 @@ -.. _whatsnew_0920: - -v0.9.2 (TBD) ------------------------ - -Deprecations -~~~~~~~~~~~~ - -Enhancements -~~~~~~~~~~~~ - -Bug fixes -~~~~~~~~~ -* :py:func:`pvlib.irradiance.get_total_irradiance` and - :py:func:`pvlib.solarposition.spa_python` now raise an error instead - of silently ignoring unknown parameters (:pull:`1437`) -* Fix a bug in :py:func:`pvlib.solarposition.sun_rise_set_transit_ephem` - where passing localized timezones with large UTC offsets could return - rise/set/transit times for the wrong day in recent versions of ``ephem`` - (:issue:`1449`, :pull:`1448`) - - -Testing -~~~~~~~ - -Documentation -~~~~~~~~~~~~~ - -Benchmarking -~~~~~~~~~~~~~ -* Updated version of numba in asv.conf from 0.36.1 to 0.40.0 to solve numba/numpy conflict. (:issue:`1439`, :pull:`1440`) -* Added a basic CI asv check (:issue:`1446`, :pull:`1454`) - -Requirements -~~~~~~~~~~~~ -* Minimum pandas version increased to v0.25.0, released July 18, 2019. 
(:pull:`1448`) - -Contributors -~~~~~~~~~~~~ -* Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`) -* Chencheng Luo (:ghuser:`roger-lcc`) -* Prajwal Borkar (:ghuser:`PrajwalBorkar`) diff --git a/pvlib/forecast.py b/pvlib/forecast.py index ce80e0ad74..634ae75ab2 100644 --- a/pvlib/forecast.py +++ b/pvlib/forecast.py @@ -15,7 +15,6 @@ from siphon.ncss import NCSS import warnings -from pvlib._deprecation import deprecated warnings.warn( @@ -23,15 +22,7 @@ 'The API may change, the functionality may be consolidated into an io ' 'module, or the module may be separated into its own package.') -_forecast_deprecated = deprecated( - since='0.9.1', - removal='a future release', - addendum='For details, see https://pvlib-python.readthedocs.io/en/stable/user_guide/forecasts.html' # noqa: E501 -) -# don't decorate the base class to prevent the subclasses from showing -# duplicate warnings: -# @_forecast_deprecated class ForecastModel: """ An object for querying and holding forecast model information for @@ -693,7 +684,6 @@ def gust_to_speed(self, data, scaling=1/1.4): return wind_speed -@_forecast_deprecated class GFS(ForecastModel): """ Subclass of the ForecastModel class representing GFS @@ -795,7 +785,6 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] -@_forecast_deprecated class HRRR_ESRL(ForecastModel): # noqa: N801 """ Subclass of the ForecastModel class representing @@ -886,7 +875,6 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] -@_forecast_deprecated class NAM(ForecastModel): """ Subclass of the ForecastModel class representing NAM @@ -968,7 +956,6 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return data[self.output_variables] -@_forecast_deprecated class HRRR(ForecastModel): """ Subclass of the ForecastModel class representing HRRR @@ -1057,7 +1044,6 @@ def process_data(self, data, cloud_cover='total_clouds', **kwargs): return 
data[self.output_variables] -@_forecast_deprecated class NDFD(ForecastModel): """ Subclass of the ForecastModel class representing NDFD forecast @@ -1126,7 +1112,6 @@ def process_data(self, data, **kwargs): return data[self.output_variables] -@_forecast_deprecated class RAP(ForecastModel): """ Subclass of the ForecastModel class representing RAP forecast model. diff --git a/pvlib/iotools/crn.py b/pvlib/iotools/crn.py index 90e1d6d6b4..fe46bb69d2 100644 --- a/pvlib/iotools/crn.py +++ b/pvlib/iotools/crn.py @@ -2,7 +2,6 @@ """ import pandas as pd -import numpy as np HEADERS = [ @@ -108,24 +107,13 @@ def read_crn(filename, map_variables=True): """ # read in data - # TODO: instead of parsing as strings and then post-processing, switch to - # pd.read_fwf(..., dtype=dict(zip(HEADERS, DTYPES)), skip_blank_lines=True) - # when our minimum pandas >= 1.2.0 (skip_blank_lines bug for <1.2.0). - # As a workaround, parse all values as strings, then drop NaN, then cast - # to the appropriate dtypes, and mask "sentinal" NaN (e.g. 
-9999.0) data = pd.read_fwf(filename, header=None, names=HEADERS, widths=WIDTHS, - dtype=str) - - # drop empty (bad) lines + na_values=NAN_DICT) + # Remove rows with all nans data = data.dropna(axis=0, how='all') - - # can't set dtypes in read_fwf because int cols can't contain NaN, so - # do it here instead + # set dtypes here because dtype kwarg not supported in read_fwf until 0.20 data = data.astype(dict(zip(HEADERS, DTYPES))) - # finally, replace -999 values with NaN - data = data.replace(NAN_DICT, value=np.nan) - # set index # UTC_TIME does not have leading 0s, so must zfill(4) to comply # with %H%M format diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index a8f9781c22..10ff919c56 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -1,3 +1,4 @@ + """ Get PSM3 TMY see https://developer.nrel.gov/docs/solar/nsrdb/psm3_data_download/ @@ -7,8 +8,6 @@ import requests import pandas as pd from json import JSONDecodeError -import warnings -from pvlib._deprecation import pvlibDeprecationWarning NSRDB_API_BASE = "https://developer.nrel.gov" PSM_URL = NSRDB_API_BASE + "/api/nsrdb/v2/solar/psm3-download.csv" @@ -21,31 +20,12 @@ 'surface_pressure', 'wind_direction', 'wind_speed') PVLIB_PYTHON = 'pvlib python' -# Dictionary mapping PSM3 names to pvlib names -VARIABLE_MAP = { - 'GHI': 'ghi', - 'DHI': 'dhi', - 'DNI': 'dni', - 'Clearsky GHI': 'ghi_clear', - 'Clearsky DHI': 'dhi_clear', - 'Clearsky DNI': 'dni_clear', - 'Solar Zenith Angle': 'solar_zenith', - 'Temperature': 'temp_air', - 'Relative Humidity': 'relative_humidity', - 'Dew point': 'temp_dew', - 'Pressure': 'pressure', - 'Wind Direction': 'wind_direction', - 'Wind Speed': 'wind_speed', - 'Surface Albedo': 'albedo', - 'Precipitable Water': 'precipitable_water', -} - def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, attributes=ATTRIBUTES, leap_day=False, full_name=PVLIB_PYTHON, - affiliation=PVLIB_PYTHON, map_variables=None, timeout=30): + affiliation=PVLIB_PYTHON, 
timeout=30): """ - Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB + Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB is described in [1]_ and the PSM3 API is described in [2]_, [3]_, and [4]_. .. versionchanged:: 0.9.0 @@ -68,23 +48,19 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, PSM3 API parameter specifing year or TMY variant to download, see notes below for options interval : int, {60, 5, 15, 30} - interval size in minutes, must be 5, 15, 30 or 60. Only used for + interval size in minutes, must be 5, 15, 30 or 60. Only used for single-year requests (i.e., it is ignored for tmy/tgy/tdy requests). attributes : list of str, optional meteorological fields to fetch. If not specified, defaults to ``pvlib.iotools.psm3.ATTRIBUTES``. See references [2]_, [3]_, and [4]_ - for lists of available fields. Alternatively, pvlib names may also be - used (e.g. 'ghi' rather than 'GHI'); see :const:`VARIABLE_MAP`. + for lists of available fields. leap_day : boolean, default False - include leap day in the results. Only used for single-year requests + include leap day in the results. Only used for single-year requests (i.e., it is ignored for tmy/tgy/tdy requests). full_name : str, default 'pvlib python' optional affiliation : str, default 'pvlib python' optional - map_variables: boolean, optional - When true, renames columns of the Dataframe to pvlib variable names - where applicable. See variable :const:`VARIABLE_MAP`. 
timeout : int, default 30 time in seconds to wait for server response before timeout @@ -120,15 +96,14 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, +===========+=============================================================+ | Year | 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, | | | 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, | - | | 2018, 2019, 2020 | + | | 2018, 2019 | +-----------+-------------------------------------------------------------+ | TMY | tmy, tmy-2016, tmy-2017, tdy-2017, tgy-2017, | | | tmy-2018, tdy-2018, tgy-2018, tmy-2019, tdy-2019, tgy-2019 | - | | tmy-2020, tdy-2020, tgy-2020 | +-----------+-------------------------------------------------------------+ .. warning:: PSM3 is limited to data found in the NSRDB, please consult the - references below for locations with available data. Additionally, + references below for locations with available data. Additionally, querying data with < 30-minute resolution uses a different API endpoint with fewer available fields (see [4]_). 
@@ -158,13 +133,6 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, # convert to string to accomodate integer years being passed in names = str(names) - # convert pvlib names in attributes to psm3 convention (reverse mapping) - # unlike psm3 columns, attributes are lower case and with underscores - amap = {value: key.lower().replace(' ', '_') for (key, value) in - VARIABLE_MAP.items()} - attributes = [amap.get(a, a) for a in attributes] - attributes = list(set(attributes)) # remove duplicate values - # required query-string parameters for request to PSM3 API params = { 'api_key': api_key, @@ -199,12 +167,12 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, # the CSV is in the response content as a UTF-8 bytestring # to use pandas we need to create a file buffer from the response fbuf = io.StringIO(response.content.decode('utf-8')) - return parse_psm3(fbuf, map_variables) + return parse_psm3(fbuf) -def parse_psm3(fbuf, map_variables=None): +def parse_psm3(fbuf): """ - Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB + Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. .. versionchanged:: 0.9.0 @@ -216,9 +184,6 @@ def parse_psm3(fbuf, map_variables=None): ---------- fbuf: file-like object File-like object containing data to read. - map_variables: bool - When true, renames columns of the Dataframe to pvlib variable names - where applicable. See variable VARIABLE_MAP. Returns ------- @@ -331,25 +296,12 @@ def parse_psm3(fbuf, map_variables=None): tz = 'Etc/GMT%+d' % -metadata['Time Zone'] data.index = pd.DatetimeIndex(dtidx).tz_localize(tz) - if map_variables is None: - warnings.warn( - 'PSM3 variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.11.0. 
Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning) - map_variables = False - if map_variables: - data = data.rename(columns=VARIABLE_MAP) - metadata['latitude'] = metadata.pop('Latitude') - metadata['longitude'] = metadata.pop('Longitude') - metadata['altitude'] = metadata.pop('Elevation') - return data, metadata -def read_psm3(filename, map_variables=None): +def read_psm3(filename): """ - Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB + Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. .. versionchanged:: 0.9.0 @@ -361,9 +313,6 @@ def read_psm3(filename, map_variables=None): ---------- filename: str Filename of a file containing data to read. - map_variables: bool - When true, renames columns of the Dataframe to pvlib variable names - where applicable. See variable VARIABLE_MAP. Returns ------- @@ -385,5 +334,5 @@ def read_psm3(filename, map_variables=None): `_ """ with open(str(filename), 'r') as fbuf: - content = parse_psm3(fbuf, map_variables) + content = parse_psm3(fbuf) return content diff --git a/pvlib/iotools/sodapro.py b/pvlib/iotools/sodapro.py index a5d3e1efd1..abb06da8bb 100644 --- a/pvlib/iotools/sodapro.py +++ b/pvlib/iotools/sodapro.py @@ -153,11 +153,11 @@ def get_cams(latitude, longitude, start, end, email, identifier='mcclear', References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ .. 
[3] `CAMS McClear Automatic Access - `_ + `_ """ try: time_step_str = TIME_STEPS_MAP[time_step] @@ -185,7 +185,7 @@ def get_cams(latitude, longitude, start, end, email, identifier='mcclear', email = email.replace('@', '%2540') # Format email address identifier = 'get_{}'.format(identifier.lower()) # Format identifier str - base_url = f"https://{server}/service/wps" + base_url = f"http://{server}/service/wps" data_inputs_dict = { 'latitude': latitude, @@ -263,9 +263,9 @@ def parse_cams(fbuf, integrated=False, label=None, map_variables=True): References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ """ metadata = {} # Initial lines starting with # contain metadata @@ -366,9 +366,9 @@ def read_cams(filename, integrated=False, label=None, map_variables=True): References ---------- .. [1] `CAMS Radiation Service Info - `_ + `_ .. [2] `CAMS McClear Service Info - `_ + `_ """ with open(str(filename), 'r') as fbuf: content = parse_cams(fbuf, integrated, label, map_variables) diff --git a/pvlib/irradiance.py b/pvlib/irradiance.py index 3de4d96f65..c5c3b1c095 100644 --- a/pvlib/irradiance.py +++ b/pvlib/irradiance.py @@ -306,7 +306,7 @@ def get_total_irradiance(surface_tilt, surface_azimuth, dni, ghi, dhi, dni_extra=None, airmass=None, albedo=.25, surface_type=None, model='isotropic', - model_perez='allsitescomposite1990'): + model_perez='allsitescomposite1990', **kwargs): r""" Determine total in-plane irradiance and its beam, sky diffuse and ground reflected components, using the specified sky diffuse irradiance model. 
diff --git a/pvlib/location.py b/pvlib/location.py index ddd2d1b96a..104fccac89 100644 --- a/pvlib/location.py +++ b/pvlib/location.py @@ -231,7 +231,8 @@ def get_clearsky(self, times, model='ineichen', solar_position=None, pressure = atmosphere.alt2pres(self.altitude) if solar_position is None: - solar_position = self.get_solarposition(times, pressure=pressure) + solar_position = self.get_solarposition(times, pressure=pressure, + **kwargs) apparent_zenith = solar_position['apparent_zenith'] apparent_elevation = solar_position['apparent_elevation'] diff --git a/pvlib/solarposition.py b/pvlib/solarposition.py index cdcacd7ec6..4047187533 100644 --- a/pvlib/solarposition.py +++ b/pvlib/solarposition.py @@ -22,7 +22,6 @@ import pandas as pd import scipy.optimize as so import warnings -import datetime from pvlib import atmosphere from pvlib.tools import datetime_to_djd, djd_to_datetime @@ -276,7 +275,7 @@ def _spa_python_import(how): def spa_python(time, latitude, longitude, altitude=0, pressure=101325, temperature=12, delta_t=67.0, - atmos_refract=None, how='numpy', numthreads=4): + atmos_refract=None, how='numpy', numthreads=4, **kwargs): """ Calculate the solar position using a python implementation of the NREL SPA algorithm. @@ -305,13 +304,13 @@ def spa_python(time, latitude, longitude, temperature : int or float, optional, default 12 avg. yearly air temperature in degrees C. delta_t : float, optional, default 67.0 - Difference between terrestrial time and UT1. If delta_t is None, uses spa.calculate_deltat using time.year and time.month from pandas.DatetimeIndex. - For most simulations the default delta_t is sufficient. + For most simulations specifing delta_t is sufficient. + Difference between terrestrial time and UT1. *Note: delta_t = None will break code using nrel_numba, this will be fixed in a future version.* - The USNO has historical and forecasted delta_t [3]_. + The USNO has historical and forecasted delta_t [3]. 
atmos_refrac : None or float, optional, default None The approximate atmospheric refraction (in degrees) at sunrise and sunset. @@ -406,17 +405,18 @@ def sun_rise_set_transit_spa(times, latitude, longitude, how='numpy', Latitude in degrees, positive north of equator, negative to south longitude : float Longitude in degrees, positive east of prime meridian, negative to west + delta_t : float, optional + If delta_t is None, uses spa.calculate_deltat + using times.year and times.month from pandas.DatetimeIndex. + For most simulations specifing delta_t is sufficient. + Difference between terrestrial time and UT1. + delta_t = None will break code using nrel_numba, + this will be fixed in a future version. + By default, use USNO historical data and predictions how : str, optional, default 'numpy' Options are 'numpy' or 'numba'. If numba >= 0.17.0 is installed, how='numba' will compile the spa functions to machine code and run them multithreaded. - delta_t : float, optional, default 67.0 - Difference between terrestrial time and UT1. - If delta_t is None, uses spa.calculate_deltat - using times.year and times.month from pandas.DatetimeIndex. - For most simulations the default delta_t is sufficient. - *Note: delta_t = None will break code using nrel_numba, - this will be fixed in a future version.* numthreads : int, optional, default 4 Number of threads to use if how == 'numba'. @@ -575,10 +575,9 @@ def sun_rise_set_transit_ephem(times, latitude, longitude, trans = [] for thetime in times: thetime = thetime.to_pydatetime() - # older versions of pyephem ignore timezone when converting to its - # internal datetime format, so convert to UTC here to support - # all versions. 
GH #1449 - obs.date = ephem.Date(thetime.astimezone(datetime.timezone.utc)) + # pyephem drops timezone when converting to its internal datetime + # format, so handle timezone explicitly here + obs.date = ephem.Date(thetime - thetime.utcoffset()) sunrise.append(_ephem_to_timezone(rising(sun), tzinfo)) sunset.append(_ephem_to_timezone(setting(sun), tzinfo)) trans.append(_ephem_to_timezone(transit(sun), tzinfo)) @@ -973,12 +972,13 @@ def nrel_earthsun_distance(time, how='numpy', delta_t=67.0, numthreads=4): to machine code and run them multithreaded. delta_t : float, optional, default 67.0 - Difference between terrestrial time and UT1. If delta_t is None, uses spa.calculate_deltat using time.year and time.month from pandas.DatetimeIndex. - For most simulations the default delta_t is sufficient. + For most simulations specifing delta_t is sufficient. + Difference between terrestrial time and UT1. *Note: delta_t = None will break code using nrel_numba, this will be fixed in a future version.* + By default, use USNO historical data and predictions numthreads : int, optional, default 4 Number of threads to use if how == 'numba'. diff --git a/pvlib/spa.py b/pvlib/spa.py index 348c30eea9..743df5ce8e 100644 --- a/pvlib/spa.py +++ b/pvlib/spa.py @@ -1097,9 +1097,15 @@ def solar_position(unixtime, lat, lon, elev, pressure, temp, delta_t, temp : int or float avg. yearly temperature at location in degrees C; used for atmospheric correction - delta_t : float + delta_t : float, optional + If delta_t is None, uses spa.calculate_deltat + using time.year and time.month from pandas.DatetimeIndex. + For most simulations specifing delta_t is sufficient. Difference between terrestrial time and UT1. - atmos_refrac : float + *Note: delta_t = None will break code using nrel_numba, + this will be fixed in a future version. + By default, use USNO historical data and predictions + atmos_refrac : float, optional The approximate atmospheric refraction (in degrees) at sunrise and sunset. 
numthreads: int, optional, default 8 diff --git a/pvlib/tests/iotools/test_crn.py b/pvlib/tests/iotools/test_crn.py index 8d880e0432..b19888dda1 100644 --- a/pvlib/tests/iotools/test_crn.py +++ b/pvlib/tests/iotools/test_crn.py @@ -83,7 +83,7 @@ def test_read_crn_problems(testfile_problems, columns_mapped, dtypes): '2020-07-06 13:10:00'], freq=None).tz_localize('UTC') values = np.array([ - [92821, 20200706, 1200, 20200706, 700, '3', -80.69, 28.62, 24.9, + [92821, 20200706, 1200, 20200706, 700, '3.0', -80.69, 28.62, 24.9, 0.0, np.nan, 0, 25.5, 'C', 0, 93.0, 0, nan, nan, 990, 0, 1.57, 0], [92821, 20200706, 1310, 20200706, 810, '2.623', -80.69, 28.62, 26.9, 0.0, 430.0, 0, 30.2, 'C', 0, 87.0, 0, nan, nan, 989, 0, diff --git a/pvlib/tests/iotools/test_psm3.py b/pvlib/tests/iotools/test_psm3.py index d151cfa6da..92451a23f1 100644 --- a/pvlib/tests/iotools/test_psm3.py +++ b/pvlib/tests/iotools/test_psm3.py @@ -4,14 +4,13 @@ import os from pvlib.iotools import psm3 -from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY, assert_index_equal +from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY import numpy as np import pandas as pd import pytest from requests import HTTPError from io import StringIO import warnings -from pvlib._deprecation import pvlibDeprecationWarning TMY_TEST_DATA = DATA_DIR / 'test_psm3_tmy-2017.csv' YEAR_TEST_DATA = DATA_DIR / 'test_psm3_2017.csv' @@ -77,8 +76,7 @@ def assert_psm3_equal(data, metadata, expected): def test_get_psm3_tmy(nrel_api_key): """test get_psm3 with a TMY""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='tmy-2017', - map_variables=False) + PVLIB_EMAIL, names='tmy-2017') expected = pd.read_csv(TMY_TEST_DATA) assert_psm3_equal(data, metadata, expected) @@ -88,8 +86,7 @@ def test_get_psm3_tmy(nrel_api_key): def test_get_psm3_singleyear(nrel_api_key): """test get_psm3 with a single year""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2017', - 
map_variables=False, interval=30) + PVLIB_EMAIL, names='2017', interval=30) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) @@ -99,8 +96,7 @@ def test_get_psm3_singleyear(nrel_api_key): def test_get_psm3_5min(nrel_api_key): """test get_psm3 for 5-minute data""" data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2019', interval=5, - map_variables=False) + PVLIB_EMAIL, names='2019', interval=5) assert len(data) == 525600/5 first_day = data.loc['2019-01-01'] expected = pd.read_csv(YEAR_TEST_DATA_5MIN) @@ -112,7 +108,7 @@ def test_get_psm3_5min(nrel_api_key): def test_get_psm3_check_leap_day(nrel_api_key): data_2012, _ = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, names="2012", interval=60, - leap_day=True, map_variables=False) + leap_day=True) assert len(data_2012) == (8760 + 24) @@ -137,7 +133,7 @@ def test_get_psm3_tmy_errors( """ with pytest.raises(HTTPError) as excinfo: psm3.get_psm3(latitude, longitude, api_key, PVLIB_EMAIL, - names=names, interval=interval, map_variables=False) + names=names, interval=interval) # ensure the HTTPError caught isn't due to overuse of the API key assert "OVER_RATE_LIMIT" not in str(excinfo.value) @@ -153,49 +149,13 @@ def io_input(request): def test_parse_psm3(io_input): """test parse_psm3""" - data, metadata = psm3.parse_psm3(io_input, map_variables=False) + data, metadata = psm3.parse_psm3(io_input) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) def test_read_psm3(): """test read_psm3""" - data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=False) + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA) expected = pd.read_csv(YEAR_TEST_DATA) assert_psm3_equal(data, metadata, expected) - - -def test_read_psm3_map_variables(): - """test read_psm3 map_variables=True""" - data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=True) - columns_mapped = ['Year', 'Month', 'Day', 'Hour', 'Minute', 
'dhi', 'dni', - 'ghi', 'dhi_clear', 'dni_clear', 'ghi_clear', - 'Cloud Type', 'Dew Point', 'solar_zenith', - 'Fill Flag', 'albedo', 'wind_speed', - 'precipitable_water', 'wind_direction', - 'relative_humidity', 'temp_air', 'pressure'] - data, metadata = psm3.read_psm3(MANUAL_TEST_DATA, map_variables=True) - assert_index_equal(data.columns, pd.Index(columns_mapped)) - - -@pytest.mark.remote_data -@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) -def test_get_psm3_attribute_mapping(nrel_api_key): - """Test that pvlib names can be passed in as attributes and get correctly - reverse mapped to PSM3 names""" - data, meta = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, - names=2019, interval=60, - attributes=['ghi', 'wind_speed'], - map_variables=True) - assert 'ghi' in data.columns - assert 'wind_speed' in data.columns - assert 'latitude' in meta.keys() - assert 'longitude' in meta.keys() - assert 'altitude' in meta.keys() - - -@pytest.mark.remote_data -@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) -def test_psm3_variable_map_deprecation_warning(nrel_api_key): - with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): - _ = psm3.read_psm3(MANUAL_TEST_DATA) diff --git a/pvlib/tests/iotools/test_sodapro.py b/pvlib/tests/iotools/test_sodapro.py index 24e5ebbfcf..10f9a1e8c9 100644 --- a/pvlib/tests/iotools/test_sodapro.py +++ b/pvlib/tests/iotools/test_sodapro.py @@ -209,7 +209,7 @@ def test_get_cams(requests_mock, testfile, index, columns, values, dtypes, mock_response = test_file.read() # Specify the full URI of a specific example, this ensures that all of the # inputs are passing on correctly - url_test_cams = 
f'https://www.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=80;date_begin=2020-01-01;date_end=2020-05-04;time_ref=UT;summarization=P01M;username=pvlib-admin%2540googlegroups.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_{identifier}&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 + url_test_cams = f'http://www.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=80;date_begin=2020-01-01;date_end=2020-05-04;time_ref=UT;summarization=P01M;username=pvlib-admin%2540googlegroups.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_{identifier}&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 requests_mock.get(url_test_cams, text=mock_response, headers={'Content-Type': 'application/csv'}) @@ -254,7 +254,7 @@ def test_get_cams_bad_request(requests_mock): Please, register yourself at www.soda-pro.com """ - url_cams_bad_request = 'https://pro.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=-999;date_begin=2020-01-01;date_end=2020-05-04;time_ref=TST;summarization=PT01H;username=test%2540test.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 + url_cams_bad_request = 'http://pro.soda-is.com/service/wps?DataInputs=latitude=55.7906;longitude=12.5251;altitude=-999;date_begin=2020-01-01;date_end=2020-05-04;time_ref=TST;summarization=PT01H;username=test%2540test.com;verbose=false&Service=WPS&Request=Execute&Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation' # noqa: E501 requests_mock.get(url_cams_bad_request, text=mock_response_bad, headers={'Content-Type': 'application/xml'}) diff --git a/pvlib/tests/test_conftest.py b/pvlib/tests/test_conftest.py index e848ed19c1..a42d28d463 100644 --- a/pvlib/tests/test_conftest.py +++ b/pvlib/tests/test_conftest.py @@ -52,21 +52,22 @@ def test_use_fixture_with_decorator(some_data): 'assert_frame_equal']) 
@pytest.mark.parametrize('pd_version', ['1.0.0', '1.1.0']) @pytest.mark.parametrize('check_less_precise', [True, False]) -def test__check_pandas_assert_kwargs(mocker, function_name, pd_version, +def test__check_pandas_assert_kwargs(mocker, monkeypatch, + function_name, pd_version, check_less_precise): # test that conftest._check_pandas_assert_kwargs returns appropriate # kwargs for the assert_x_equal functions - # NOTE: be careful about mixing mocker.patch and pytest.MonkeyPatch! - # they do not coordinate their cleanups, so it is safest to only - # use one or the other. GH #1447 + # patch the pandas assert; not interested in actually calling them: + def patched_assert(*args, **kwargs): + pass - # patch the pandas assert; not interested in actually calling them, - # plus we want to spy on how they get called. - spy = mocker.patch('pandas.testing.' + function_name) + monkeypatch.setattr(pandas.testing, function_name, patched_assert) + # then attach a spy to it so we can see what args it is called with: + mocked_function = mocker.spy(pandas.testing, function_name) # patch pd.__version__ to exercise the two branches in # conftest._check_pandas_assert_kwargs - mocker.patch('pandas.__version__', new=pd_version) + monkeypatch.setattr(pandas, '__version__', pd_version) # finally, run the function and check what args got passed to pandas: assert_function = getattr(conftest, function_name) @@ -78,4 +79,4 @@ def test__check_pandas_assert_kwargs(mocker, function_name, pd_version, else: expected_kwargs = {'check_less_precise': check_less_precise} - spy.assert_called_once_with(*args, **expected_kwargs) + mocked_function.assert_called_with(*args, **expected_kwargs) diff --git a/pvlib/tests/test_forecast.py b/pvlib/tests/test_forecast.py index 4382666317..db2ed75154 100644 --- a/pvlib/tests/test_forecast.py +++ b/pvlib/tests/test_forecast.py @@ -14,8 +14,6 @@ ) from .conftest import RERUNS, RERUNS_DELAY -from pvlib._deprecation import pvlibDeprecationWarning - pytestmark = 
pytest.mark.skipif(not has_siphon, reason='requires siphon') @@ -54,8 +52,7 @@ @requires_siphon @pytest.fixture(scope='module', params=_modelclasses) def model(request): - with pytest.warns(pvlibDeprecationWarning): - amodel = request.param() + amodel = request.param() try: raw_data = amodel.get_data(_latitude, _longitude, _start, _end) except Exception as e: @@ -93,8 +90,7 @@ def test_process_data(model): def test_bad_kwarg_get_data(): # For more information on why you would want to pass an unknown keyword # argument, see Github issue #745. - with pytest.warns(pvlibDeprecationWarning): - amodel = NAM() + amodel = NAM() data = amodel.get_data(_latitude, _longitude, _start, _end, bad_kwarg=False) assert not data.empty @@ -107,8 +103,7 @@ def test_bad_kwarg_get_data(): def test_bad_kwarg_get_processed_data(): # For more information on why you would want to pass an unknown keyword # argument, see Github issue #745. - with pytest.warns(pvlibDeprecationWarning): - amodel = NAM() + amodel = NAM() data = amodel.get_processed_data(_latitude, _longitude, _start, _end, bad_kwarg=False) assert not data.empty @@ -119,8 +114,7 @@ def test_bad_kwarg_get_processed_data(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_how_kwarg_get_processed_data(): - with pytest.warns(pvlibDeprecationWarning): - amodel = NAM() + amodel = NAM() data = amodel.get_processed_data(_latitude, _longitude, _start, _end, how='clearsky_scaling') assert not data.empty @@ -131,8 +125,7 @@ def test_how_kwarg_get_processed_data(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_vert_level(): - with pytest.warns(pvlibDeprecationWarning): - amodel = NAM() + amodel = NAM() vert_level = 5000 amodel.get_processed_data(_latitude, _longitude, _start, _end, vert_level=vert_level) @@ -143,8 +136,7 @@ def test_vert_level(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def 
test_datetime(): - with pytest.warns(pvlibDeprecationWarning): - amodel = NAM() + amodel = NAM() start = datetime.now(tz=timezone.utc) end = start + timedelta(days=1) amodel.get_processed_data(_latitude, _longitude, start, end) @@ -155,8 +147,7 @@ def test_datetime(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_queryvariables(): - with pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() new_variables = ['u-component_of_wind_height_above_ground'] data = amodel.get_data(_latitude, _longitude, _start, _end, query_variables=new_variables) @@ -165,19 +156,16 @@ def test_queryvariables(): @requires_siphon def test_latest(): - with pytest.warns(pvlibDeprecationWarning): - GFS(set_type='latest') + GFS(set_type='latest') @requires_siphon def test_full(): - with pytest.warns(pvlibDeprecationWarning): - GFS(set_type='full') + GFS(set_type='full') def test_temp_convert(): - with pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() data = pd.DataFrame({'temp_air': [273.15]}) data['temp_air'] = amodel.kelvin_to_celsius(data['temp_air']) @@ -195,31 +183,27 @@ def test_temp_convert(): def test_set_location(): - with pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() latitude, longitude = 32.2, -110.9 time = 'UTC' amodel.set_location(time, latitude, longitude) def test_set_query_time_range_tzfail(): - with pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() with pytest.raises(TypeError): amodel.set_query_time_range(datetime.now(), datetime.now()) def test_cloud_cover_to_transmittance_linear(): - with pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() assert_allclose(amodel.cloud_cover_to_transmittance_linear(0), 0.75) assert_allclose(amodel.cloud_cover_to_transmittance_linear(100), 0.0) assert_allclose(amodel.cloud_cover_to_transmittance_linear(0, 0.5), 0.5) def test_cloud_cover_to_ghi_linear(): - with 
pytest.warns(pvlibDeprecationWarning): - amodel = GFS() + amodel = GFS() ghi_clear = 1000 offset = 25 out = amodel.cloud_cover_to_ghi_linear(0, ghi_clear, offset=offset) diff --git a/pvlib/tracking.py b/pvlib/tracking.py index 951f2e886e..732108dec2 100644 --- a/pvlib/tracking.py +++ b/pvlib/tracking.py @@ -510,9 +510,6 @@ def singleaxis(apparent_zenith, apparent_azimuth, # Calculate surface_tilt dotproduct = (panel_norm_earth * projected_normal).sum(axis=1) - # for edge cases like axis_tilt=90, numpy's SIMD can produce values like - # dotproduct = (1 + 2e-16). Clip off the excess so that arccos works: - dotproduct = np.clip(dotproduct, -1, 1) surface_tilt = 90 - np.degrees(np.arccos(dotproduct)) # Bundle DataFrame for return values and filter for sun below horizon. diff --git a/setup.py b/setup.py index 96b4737515..7301f5eab6 100755 --- a/setup.py +++ b/setup.py @@ -39,12 +39,15 @@ URL = 'https://github.com/pvlib/pvlib-python' INSTALL_REQUIRES = ['numpy >= 1.16.0', - 'pandas >= 0.25.0', + 'pandas >= 0.22.0', 'pytz', 'requests', 'scipy >= 1.2.0', - 'h5py', - 'dataclasses; python_version < "3.7"'] + 'h5py'] + +# include dataclasses as a dependency only on python 3.6 +if sys.version_info.major == 3 and sys.version_info.minor == 6: + INSTALL_REQUIRES.append('dataclasses') TESTS_REQUIRE = ['nose', 'pytest', 'pytest-cov', 'pytest-mock', 'requests-mock', 'pytest-timeout', 'pytest-rerunfailures', @@ -53,8 +56,8 @@ 'optional': ['cython', 'ephem', 'netcdf4', 'nrel-pysam', 'numba', 'pvfactors', 'siphon', 'statsmodels', 'cftime >= 1.1.1'], - 'doc': ['ipython', 'matplotlib', 'sphinx == 4.5.0', - 'pydata-sphinx-theme == 0.8.1', 'sphinx-gallery', + 'doc': ['ipython', 'matplotlib', 'sphinx == 3.1.2', + 'pydata-sphinx-theme == 0.8.0', 'sphinx-gallery', 'docutils == 0.15.2', 'pillow', 'netcdf4', 'siphon', 'sphinx-toggleprompt >= 0.0.5', 'pvfactors'], 'test': TESTS_REQUIRE From 5c01bc49dfe987df406b7c3f81fd64508f5a7567 Mon Sep 17 00:00:00 2001 From: "Adam R. 
Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Thu, 19 May 2022 15:36:24 +0200 Subject: [PATCH 7/8] Update whatsnew --- docs/sphinx/source/whatsnew/v0.9.2.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.2.rst b/docs/sphinx/source/whatsnew/v0.9.2.rst index 05b7ed58ab..88f93401c6 100644 --- a/docs/sphinx/source/whatsnew/v0.9.2.rst +++ b/docs/sphinx/source/whatsnew/v0.9.2.rst @@ -8,6 +8,10 @@ Deprecations Enhancements ~~~~~~~~~~~~ +* :py:func:`pvlib.iotools.read_surfrad` now also accepts remote files + with https links in addition to files on the SURFRAD FTP server + (:pull:`1459`) + Bug fixes ~~~~~~~~~ @@ -37,6 +41,7 @@ Requirements Contributors ~~~~~~~~~~~~ +* Adam R. Jensen (:ghuser:`AdamRJensen`) * Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`) * Chencheng Luo (:ghuser:`roger-lcc`) -* Prajwal Borkar (:ghuser:`PrajwalBorkar`) +* Prajwal Borkar (:ghuser:`PrajwalBorkar`) From 8efab4743e63b4a56d0a636675c9b5003d5fcb94 Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Wed, 25 May 2022 10:29:17 +0200 Subject: [PATCH 8/8] Update read_surfrad documentation --- pvlib/iotools/surfrad.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pvlib/iotools/surfrad.py b/pvlib/iotools/surfrad.py index fdc406b748..7f4d86e46a 100644 --- a/pvlib/iotools/surfrad.py +++ b/pvlib/iotools/surfrad.py @@ -44,7 +44,7 @@ def read_surfrad(filename, map_variables=True): Parameters ---------- filename: str - Filepath or url. + Filepath or URL. URL can be either FTP or HTTP. map_variables: bool When true, renames columns of the Dataframe to pvlib variable names where applicable. See variable :const:`VARIABLE_MAP`. 
@@ -113,7 +113,8 @@ def read_surfrad(filename, map_variables=True): ======================= ====== ========================================== See README files located in the station directories in the SURFRAD - data archives [2]_ for details on SURFRAD daily data files. + data archives [2]_ for details on SURFRAD daily data files. In addition to + the FTP server, the SURFRAD files are also available via HTTP access [3]_. References ---------- @@ -122,6 +123,8 @@ def read_surfrad(filename, map_variables=True): `SURFRAD Homepage `_ .. [2] NOAA SURFRAD Data Archive `SURFRAD Archive `_ + .. [3] `NOAA SURFRAD HTTP Index + `_ """ if str(filename).startswith('ftp') or str(filename).startswith('http'): req = Request(filename)