# AttrDict is Calliope's nested-dict helper, used throughout these snippets
from calliope.core.attrdict import AttrDict

processed_links = AttrDict()

for link in links_in:
    loc_from, loc_to = [i.strip() for i in link.split(',')]

    # Skip this link entirely if it has been told not to exist
    if not links_in[link].get('exists', True):
        continue

    # Also skip this link - and warn about it - if it links to a
    # now-nonexistent (because removed) location
    if (loc_from not in locations.keys() or loc_to not in locations.keys()):
        warnings.append(
            'Not building the link {},{} because one or both of its '
            'locations have been removed from the model by setting '
            '``exists: false``'.format(loc_from, loc_to)
        )
        continue

    processed_transmission_techs = AttrDict()
    for tech_name in links_in[link].techs:
        # Skip techs that have been told not to exist
        # for this particular link
        if not links_in[link].get_key('techs.{}.exists'.format(tech_name), True):
            continue

        if tech_name not in processed_transmission_techs:
            tech_settings = AttrDict()

            # Combine model-wide settings from all parent groups
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                tech_settings.union(
                    tech_groups_in[parent],
                    allow_override=True
                )

            # Now overwrite with the tech's own model-wide settings
            tech_settings.union(
                techs_in[tech_name],
                allow_override=True
            )
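# Illustrative only (not from the Calliope source): a minimal sketch of the
# kind of ``links_in`` structure the loop above iterates over, with link keys
# of the form 'loc_from,loc_to' and per-link tech settings. All names and
# values here are made-up examples.
example_links_in = AttrDict({
    'region1,region2': {
        'techs': {
            'ac_transmission': {'constraints': {'energy_cap_max': 100}},
            'dc_transmission': {'exists': False},  # skipped by the inner tech loop
        }
    },
    'region1,region3': {'exists': False},  # whole link skipped by the outer loop
})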
def generate_simple_sets(model_run):
    """
    Generate basic sets for a given pre-processed ``model_run``.

    Parameters
    ----------
    model_run : AttrDict

    """
    sets = AttrDict()

    flat_techs = model_run.techs.as_dict(flat=True)
    flat_locations = model_run.locations.as_dict(flat=True)

    sets.resources = set(flatten_list(
        v for k, v in flat_techs.items()
        if '.carrier' in k
    ))

    sets.carriers = sets.resources - set(['resource'])

    sets.carrier_tiers = set(
        key.split('.carrier_')[1]
        for key in flat_techs.keys()
        if '.carrier_' in key
    )
multiple_ok = (
    isinstance(r, list) and
    any([i in tech_config.constraints for i in r])
)
if not single_ok and not multiple_ok:
    errors.append(
        '`{}` at `{}` fails to define '
        'all required constraints: {}'.format(tech_id, loc_id, required)
    )

# Warn if defining a carrier ratio for a conversion_plus tech, but applying
# it to a carrier that isn't one of the carriers specified by that tech,
# e.g. carrier_ratios.carrier_in_2.cooling when cooling isn't a carrier
defined_carriers = get_all_carriers(model_run.techs[tech_id].essentials)
carriers_in_ratios = [
    i.split('.')[-1] for i in
    tech_config.constraints.get_key('carrier_ratios', AttrDict()).as_dict_flat().keys()
]
for carrier in carriers_in_ratios:
    if carrier not in defined_carriers:
        model_warnings.append(
            'Tech `{t}` gives a carrier ratio for `{c}`, but does not actually '
            'configure `{c}` as a carrier.'.format(t=tech_id, c=carrier)
        )
# If the technology involves storage, warn when energy_cap and storage_cap
# aren't connected
energy_cap_per_storage_cap_params = [
    'charge_rate', 'energy_cap_per_storage_cap_min',
    'energy_cap_per_storage_cap_max', 'energy_cap_per_storage_cap_equals'
]
if (loc_id + '::' + tech_id in model_run.sets.loc_techs_store
        and not any(i in tech_config.constraints.keys()
                    for i in energy_cap_per_storage_cap_params)):
    logger.info(
    # `timesteps` set is built from the results of timeseries_data processing
    sets.timesteps = list(model_run.timesteps.astype(str))
    model_run.del_key('timesteps')

    # `techlists` are strings with comma-separated techs used for grouping in
    # some model-wide constraints
    sets.techlists = set()
    for k in model_run.model.get_key('group_share', {}).keys():
        sets.techlists.add(k)
    # `group_constraints` holds the names of constraints that are defined at
    # a group level; `group_names_<constraint>` (built below) holds the group
    # names per constraint
    sets.group_constraints = set()
    group_constraints = AttrDict({
        name: data for name, data in model_run['group_constraints'].items()
        if data.get("exists", True)
    })
    if len(group_constraints.keys()) > 0:
        sets.group_constraints.update(
            i.split('.')[1] for i in group_constraints.as_dict_flat().keys()
            if i.split('.')[1] not in ['techs', 'locs']
        )
        for constr in sets.group_constraints:
            sets['group_names_' + constr] = set(
                k for k, v in group_constraints.items()
                if constr in v.keys()
            )

    return sets
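# Illustrative only (not part of the function above): a rough sketch of
# inspecting the simple sets, assuming ``model_run`` is a fully pre-processed
# AttrDict as described in the docstring above.
simple_sets = generate_simple_sets(model_run)
print(sorted(simple_sets.carriers))       # every carrier except 'resource'
print(sorted(simple_sets.carrier_tiers))  # e.g. 'in', 'out', 'in_2', ...
print(sorted(simple_sets.techlists))      # comma-separated tech groupings, if any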
def apply_overrides(config, scenario=None, override_dict=None):
    """
    Generate a processed Model configuration, applying any scenario overrides.

    Parameters
    ----------
    config : AttrDict
        a model configuration AttrDict
    scenario : str, optional
    override_dict : str or dict or AttrDict, optional
        If a YAML string, converted to AttrDict

    """
    debug_comments = AttrDict()

    config_model = AttrDict.from_yaml(os.path.join(
        os.path.dirname(calliope.__file__), 'config', 'defaults.yaml'
    ))

    # Interpret timeseries_data_path as relative
    config.model.timeseries_data_path = relative_path(
        config.config_path, config.model.timeseries_data_path
    )

    # FutureWarning: check if config includes an explicit objective cost class.
    # Added in 0.6.4-dev, to be removed in v0.7.0-dev.
    has_explicit_cost_class = isinstance(
        config.get_key('run.objective_options.cost_class', None), dict
    )

    # The input files are allowed to override other model defaults
    config_model.union(config, allow_override=True)
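# Illustrative only (not part of the function above): a rough sketch of how
# apply_overrides might be invoked. The 'model.yaml' path, the scenario name
# and the override key/value are made-up examples, and the function's return
# value is not shown in the snippet above, so it is left unnamed here.
processed = apply_overrides(
    AttrDict.from_yaml('model.yaml'),     # hypothetical model configuration
    scenario='high_demand',               # hypothetical scenario name
    override_dict={'run.solver': 'cbc'},  # a plain dict is also accepted
)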
new_tech_config = convert_subdict(v, conversion_dict['tech_config'])

if 'constraints_per_distance' in v:
    # Convert loss to efficiency
    if 'e_loss' in v.constraints_per_distance:
        v.constraints_per_distance.e_loss = 1 - v.constraints_per_distance.e_loss
    new_tech_config.update(
        convert_subdict(
            v.constraints_per_distance,
            conversion_dict['tech_constraints_per_distance_config']
        )
    )

# Costs are a little more involved -- need to get each cost class
# as a subdict and merge the results back together
new_cost_dict = AttrDict()

if 'costs' in v:
    for cost_class in v.costs:
        new_cost_dict[cost_class] = convert_subdict(
            v.costs[cost_class], conversion_dict['tech_costs_config']
        )

if 'costs_per_distance' in v:
    for cost_class in v.costs_per_distance:
        # FIXME update not overwrite
        per_distance_config = convert_subdict(
            v.costs_per_distance[cost_class],
            conversion_dict['tech_costs_per_distance_config']
        )
        if cost_class in new_cost_dict:
            new_cost_dict[cost_class].union(per_distance_config)
        else:
            new_cost_dict[cost_class] = per_distance_config
if 'depreciation' in v:
    # 'depreciation.interest.{cost_class}' goes to 'costs.{cost_class}.interest_rate'
    if 'interest' in v.depreciation:
        for cost_class, interest in v.depreciation.interest.items():
            new_cost_dict.set_key(
                '{}.interest_rate'.format(cost_class), interest
            )
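# Illustrative only (made-up data): how the depreciation conversion above
# reshapes an old-style definition, assuming set_key writes
# '<cost_class>.interest_rate' as in the completed call above.
old_style = AttrDict({'depreciation': {'interest': {'monetary': 0.10}}})
converted_costs = AttrDict()
for cc, rate in old_style.depreciation.interest.items():
    converted_costs.set_key('{}.interest_rate'.format(cc), rate)
# converted_costs is now {'monetary': {'interest_rate': 0.1}}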
    # 5) Fully populate timeseries data
    # Raises ModelErrors if there are problems with timeseries data at this stage
    model_run['timeseries_data'], model_run['timesteps'] = (
        process_timeseries_data(config, model_run)
    )

    # 6) Grab additional relevant bits from run and model config
    model_run['run'] = config['run']
    model_run['model'] = config['model']
    model_run['group_constraints'] = config.get('group_constraints', {})

    # 7) Initialize sets
    all_sets = sets.generate_simple_sets(model_run)
    all_sets.union(sets.generate_loc_tech_sets(model_run, all_sets))
    all_sets = AttrDict({k: list(v) for k, v in all_sets.items()})
    model_run['sets'] = all_sets
    model_run['constraint_sets'] = constraint_sets.generate_constraint_sets(model_run)

    # 8) Final sense-checking
    final_check_comments, warning_messages, errors = checks.check_final(model_run)
    debug_comments.union(final_check_comments)
    exceptions.print_warnings_and_raise_errors(warnings=warning_messages, errors=errors)

    # 9) Build a debug data dict with comments and the original configs
    debug_data = AttrDict({
        'comments': debug_comments,
        'config_initial': config,
    })

    return model_run, debug_data
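# Illustrative only (not part of the function body above): a rough sketch of
# inspecting the two returned objects, assuming they were obtained from the
# enclosing pre-processing function (whose signature is not shown here).
print(model_run['sets']['carriers'])     # simple sets, converted to lists in step 7
print(debug_data['comments'].as_dict())  # debug comments gathered during processing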