    Since variables are labelled with an integer instrument
    number, Observations are named with the variable and
    instrument number found in the source files.

    e.g. an SRML file contains two columns labelled 1001 and
    1002, representing GHI at instrument 1 and instrument 2
    respectively. The `pvlib.iotools` package converts these
    labels to 'ghi_1' and 'ghi_2', and we use them to
    differentiate between measurements recorded by different
    instruments. A sketch of this renaming follows the function
    body below.
"""
# Request ~month old data at initialization to ensure we get a response.
start = pd.Timestamp.now() - pd.Timedelta('30 days')
end = start
try:
extra_params = common.decode_extra_parameters(site)
except ValueError:
        logger.warning('Cannot create reference observations at SRML site '
                       f'{site.name}, missing required parameters.')
return
    # Use the site name without the network prefix to build
    # observation names with the original column label rather
    # than the SFA variable name.
site_name = common.site_name_no_network(site)
try:
site_df = fetch(api, site, start, end)
except error.HTTPError:
logger.error('Could not find data to create observations '
f'for SRML site {site_name}.')
return
else:
        if site_df is None:
            # Without sample data we cannot tell which variables and
            # instruments the site reports, so no observations are made.
            logger.error('Failed to fetch data for SRML site '
                         f'{site_name}.')
            return
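# Illustrative sketch of the renaming convention described in the docstring
# above. The three-digit variable-code mapping is an assumed subset for
# demonstration only; the real translation is performed by pvlib.iotools
# when it parses an SRML file.
def _rename_srml_column(label):
    variable_codes = {'100': 'ghi'}        # assumed subset of SRML codes
    variable = variable_codes[label[:3]]   # '1001' -> 'ghi'
    instrument = label[3:]                 # '1001' -> '1'
    return f'{variable}_{instrument}'      # -> 'ghi_1'

# _rename_srml_column('1001') == 'ghi_1'
# _rename_srml_column('1002') == 'ghi_2'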
    end : datetime
        The end of the period to request data for.
realtime : bool
Whether or not to look for realtime data. Note that this data is
raw, unverified data from the instruments.
Returns
-------
data : pandas.DataFrame
All of the requested data concatenated into a single DataFrame.
"""
if realtime:
url_format = REALTIME_URL
else:
url_format = ARCHIVE_URL
# load extra parameters for api arguments.
extra_params = common.decode_extra_parameters(site)
abbreviation = extra_params['network_api_abbreviation']
single_day_dfs = []
for day in pd.date_range(start, end):
filename = url_format.format(abrv=abbreviation,
year=day.year,
year_2d=day.strftime('%y'),
jday=day.strftime('%j'))
logger.info(f'Requesting data for SOLRAD site {site.name}'
f' on {day.strftime("%Y%m%d")}.')
        try:
            # parse the day's file with pvlib's SOLRAD reader
            solrad_day = iotools.read_solrad(filename)
        except URLError:
            logger.warning(f'Could not retrieve SOLRAD data for site '
                           f'{site.name} on {day.strftime("%Y%m%d")}.')
            logger.debug(f'Failed SOLRAD URL: {filename}.')
        else:
            single_day_dfs.append(solrad_day)
    return pd.concat(single_day_dfs)
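# Illustrative sketch of how the url_format template above expands for one
# day. The template string here is a hypothetical stand-in; the real
# REALTIME_URL/ARCHIVE_URL constants are defined elsewhere in the module.
EXAMPLE_URL = 'https://example.invalid/{abrv}/{year}/{abrv}{year_2d}{jday}.dat'
_day = pd.Timestamp('2023-06-01')
_example = EXAMPLE_URL.format(abrv='abq', year=_day.year,
                              year_2d=_day.strftime('%y'),
                              jday=_day.strftime('%j'))
# _example == 'https://example.invalid/abq/2023/abq23152.dat'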
Returns
-------
datamodel.Site
The created site object.
"""
# get a reference to network before we serialize extra_parameters
network = site['extra_parameters']['network']
network_handler = NETWORKHANDLER_MAP.get(network)
if network_handler is None:
        logger.warning(f'Unrecognized network {network} on site '
                       f'{site["name"]}; observations cannot be '
                       'automatically generated.')
return
site.update({'extra_parameters': json.dumps(site['extra_parameters'])})
site_name = f"{network} {common.clean_name(site['name'])}"
existing = common.existing_sites(api)
if site_name in existing:
        logger.info('Site %s already exists', site_name)
created = existing[site_name]
else:
site['name'] = site_name
site_to_create = Site.from_dict(site)
try:
created = api.create_site(site_to_create)
except HTTPError as e:
logger.error(f"Failed to create Site {site['name']}.")
logger.debug(f'HTTP Error: {e.response.text}')
return False
else:
logger.info(f'Created Site {created.name} successfully.')
network_handler.initialize_site_observations(api, created)
    return created
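# Hypothetical example input for the site-creation code above: a site dict
# whose nested extra_parameters select the network handler. After JSON
# serialization the posted name becomes '<network> <cleaned name>', e.g.
# 'NOAA SOLRAD Albuquerque'. All values below are illustrative only.
example_site = {
    'name': 'Albuquerque',
    'latitude': 35.04,
    'longitude': -106.62,
    'elevation': 1617.0,
    'timezone': 'America/Denver',
    'extra_parameters': {'network': 'NOAA SOLRAD',
                         'network_api_id': 'abq'},
}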
def get_filename(site, year):
"""Get the applicable file name for CRN a site on a given date.
"""
extra_params = common.decode_extra_parameters(site)
network_api_id = extra_params['network_api_id']
filename = f'{year}/CRNS0101-05-{year}-{network_api_id}.txt'
return CRN_URL + filename
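# Usage sketch of the filename pattern built above, with hypothetical
# values: 'NY_Millbrook_3_W' is an assumed network_api_id, and the result
# is appended to the module-level CRN_URL constant.
_year, _network_api_id = 2023, 'NY_Millbrook_3_W'  # assumed values
print(f'{_year}/CRNS0101-05-{_year}-{_network_api_id}.txt')
# -> 2023/CRNS0101-05-2023-NY_Millbrook_3_W.txt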
def update_observations(api, sites, observations, start, end):
    """Post new observation data to all reference observations
    at each SOLRAD site between start and end.

    Parameters
    ----------
api : solarforecastarbiter.io.api.APISession
An active Reference user session.
sites : list of solarforecastarbiter.datamodel.Site
        List of all reference sites as Site objects.
observations : list of solarforecastarbiter.datamodel.Observation
List of all reference observations.
start : datetime
The beginning of the period to request data for.
end : datetime
The end of the period to request data for.
"""
solrad_sites = common.filter_by_networks(sites, 'NOAA SOLRAD')
for site in solrad_sites:
common.update_site_observations(
api, fetch, site, observations, start, end)
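# Minimal sketch of the network filtering step used above, assuming each
# Site stores its network name inside JSON-encoded extra_parameters. This
# is a simplified stand-in for common.filter_by_networks, not its actual
# implementation.
import json

def _filter_by_network_sketch(sites, network):
    matched = []
    for site in sites:
        try:
            params = json.loads(site.extra_parameters)
        except (TypeError, ValueError):
            continue
        if params.get('network') == network:
            matched.append(site)
    return matched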
def adjust_site_parameters(site):
"""Inserts modeling parameters for sites with pv measurments
Parameters
----------
site: dict
Returns
-------
dict
Copy of inputs plus a new key 'modeling_parameters'.
"""
return common.apply_json_site_parameters(DEFAULT_SITEFILE, site)
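# Minimal sketch of what apply_json_site_parameters is assumed to do here:
# look the site up in a JSON site file and merge that entry's
# modeling_parameters into a copy of the input dict. The file layout and
# helper below are assumptions, not the library's actual implementation.
import json

def _apply_json_site_parameters_sketch(sitefile, site):
    with open(sitefile) as f:
        known = {s['name']: s for s in json.load(f)['sites']}
    out = dict(site)
    match = known.get(site['name'])
    if match is not None:
        out['modeling_parameters'] = match.get('modeling_parameters', {})
    return out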