# Imports assumed by these ActivitySim snippets (not shown in the originals):
import logging

import pandas as pd

from activitysim.core import config, inject, simulate, tracing
from activitysim.core.util import reindex

logger = logging.getLogger(__name__)


def run_location_simulate(
        segment_name,
        persons_merged,
        location_sample_df,
        skim_dict,
        dest_size_terms,
        model_settings,
        chunk_size, trace_label):
    """
    Run the location model on location_sample annotated with mode_choice logsums
    to select a dest zone from the sampled alternatives.
    """
    assert not persons_merged.empty

    model_spec = simulate.read_model_spec(file_name=model_settings['SPEC'])

    # FIXME - MEMORY HACK - only include columns actually used in spec
    chooser_columns = model_settings['SIMULATE_CHOOSER_COLUMNS']
    choosers = persons_merged[chooser_columns]

    alt_dest_col_name = model_settings["ALT_DEST_COL_NAME"]

    # alternatives are pre-sampled and annotated with logsums and pick_count
    # but we have to merge additional alt columns into the alt sample list
    alternatives = \
        pd.merge(location_sample_df, dest_size_terms,
                 left_on=alt_dest_col_name, right_index=True, how="left")

    logger.info("Running %s with %d persons" % (trace_label, len(choosers)))

    # create wrapper with keys for this lookup - in this case there is a TAZ in the choosers
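# The functions in these snippets read several keys from model_settings. A hypothetical
# sketch of such a settings dict (the key names come from the code; the values are
# illustrative placeholders, not ActivitySim defaults):
example_model_settings = {
    'SPEC': 'location_choice.csv',                  # utility expressions for the simulate step
    'SAMPLE_SPEC': 'location_choice_sample.csv',    # expressions for the sampling step
    'SIMULATE_CHOOSER_COLUMNS': ['income_segment', 'TAZ'],
    'ALT_DEST_COL_NAME': 'alt_dest',
    'SAMPLE_SIZE': 30,
}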
def run_destination_sample(
        spec_segment_name,
        tours,
        persons_merged,
        model_settings,
        skim_dict,
        destination_size_terms,
        chunk_size, trace_label):

    model_spec_file_name = model_settings['SAMPLE_SPEC']
    model_spec = simulate.read_model_spec(file_name=model_spec_file_name)
    model_spec = model_spec[[spec_segment_name]]

    # merge persons into tours
    choosers = pd.merge(tours, persons_merged, left_on='person_id', right_index=True, how='left')

    # FIXME - MEMORY HACK - only include columns actually used in spec
    chooser_columns = model_settings['SIMULATE_CHOOSER_COLUMNS']
    choosers = choosers[chooser_columns]

    constants = config.get_model_constants(model_settings)

    sample_size = model_settings["SAMPLE_SIZE"]
    alt_dest_col_name = model_settings["ALT_DEST_COL_NAME"]

    logger.info("running %s with %d tours", trace_label, len(choosers))

    # create wrapper with keys for this lookup - in this case there is a workplace_taz
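# The tours-to-persons merge above is a plain pandas left join of tours against the
# persons index. A toy illustration with made-up columns:
_tours = pd.DataFrame({'person_id': [1, 1, 2], 'tour_type': ['work', 'eat', 'shop']})
_persons = pd.DataFrame({'age': [35, 62]}, index=pd.Index([1, 2], name='person_id'))
# each tour row picks up its person's attributes; tours without a match get NaNs
_choosers = pd.merge(_tours, _persons, left_on='person_id', right_index=True, how='left')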
def tour_mode_choice_spec(model_settings):

    assert 'SPEC' in model_settings

    return simulate.read_model_spec(file_name=model_settings['SPEC'])
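# read_model_spec returns a coefficient DataFrame with one row per utility expression;
# for segmented specs there is one column per segment, which is why the destination
# functions keep a single segment with model_spec[[spec_segment_name]]. A hand-built
# stand-in (expression and segment names are invented):
_model_spec = pd.DataFrame(
    {'work': [0.5, -0.1], 'school': [0.3, -0.2]},
    index=['@df.income > 50000', 'distance_to_dest'])
_work_spec = _model_spec[['work']]   # one-column DataFrame for the 'work' segment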
def run_destination_simulate(
        spec_segment_name,
        tours,
        persons_merged,
        destination_sample,
        model_settings,
        skim_dict,
        destination_size_terms,
        chunk_size, trace_label):
    """
    Run destination_simulate on tour_destination_sample annotated with mode_choice
    logsums to select a destination from the sampled alternatives.
    """
    model_spec_file_name = model_settings['SPEC']
    model_spec = simulate.read_model_spec(file_name=model_spec_file_name)
    model_spec = model_spec[[spec_segment_name]]

    # merge persons into tours
    choosers = pd.merge(tours,
                        persons_merged,
                        left_on='person_id', right_index=True, how='left')

    # FIXME - MEMORY HACK - only include columns actually used in spec
    chooser_columns = model_settings['SIMULATE_CHOOSER_COLUMNS']
    choosers = choosers[chooser_columns]

    alt_dest_col_name = model_settings["ALT_DEST_COL_NAME"]
    origin_col_name = model_settings['CHOOSER_ORIG_COL_NAME']

    # alternatives are pre-sampled and annotated with logsums and pick_count
    # but we have to merge the size_terms column into the alt sample list
    destination_sample['size_term'] = \
        reindex(destination_size_terms.size_term,
                destination_sample[alt_dest_col_name])  # assumption: look up each sampled destination's size_term
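# The size_term merge just above is a per-row lookup: each sampled alternative gets the
# size term of its destination zone. A roughly equivalent toy version in plain pandas:
_size_terms = pd.Series([120.0, 45.0], index=pd.Index([14, 16], name='dest_TAZ'), name='size_term')
_sample = pd.DataFrame({'alt_dest': [14, 16, 14]})
_sample['size_term'] = _sample['alt_dest'].map(_size_terms)   # 120.0, 45.0, 120.0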
def run_location_sample(
        segment_name, persons_merged, skim_dict, dest_size_terms,
        model_settings, chunk_size, trace_label):  # signature assumed: mirrors run_location_simulate above
    """
    Build the location sample table: select a sample subset of potential locations
    for each chooser.

    The sample is generated by making multiple choices (sample_size choices), which
    results in a sample containing up to sample_size alternatives for each chooser
    (e.g. person) and a pick_count indicating how many times that alternative was
    selected for that chooser.

    person_id, dest_TAZ, rand,            pick_count
    23750,     14,       0.565502716034,  4
    23750,     16,       0.711135838871,  6
    ...
    23751,     12,       0.408038878552,  1
    23751,     14,       0.972732479292,  2
    """
    assert not persons_merged.empty

    model_spec = simulate.read_model_spec(file_name=model_settings['SAMPLE_SPEC'])

    # FIXME - MEMORY HACK - only include columns actually used in spec
    chooser_columns = model_settings['SIMULATE_CHOOSER_COLUMNS']
    choosers = persons_merged[chooser_columns]

    alternatives = dest_size_terms

    sample_size = model_settings["SAMPLE_SIZE"]
    alt_dest_col_name = model_settings["ALT_DEST_COL_NAME"]

    logger.info("Running %s with %d persons" % (trace_label, len(choosers.index)))

    # create wrapper with keys for this lookup - in this case there is a TAZ in the choosers
    # and a TAZ in the alternatives which get merged during interaction
    # (logit.interaction_dataset suffixes duplicate chooser column with '_chooser')
    # the skims will be available under the name "skims" for any @ expressions
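# The "wrapper" in the trailing comments is a keyed skim lookup built with skim_dict.wrap
# (as in the od_skim_wrapper line further below). A hedged sketch of how it might be
# constructed and exposed to '@' expressions (the key names are assumptions taken from
# the comments above):
def _example_skims_locals(skim_dict, alt_dest_col_name):
    # chooser-side key 'TAZ', alternative-side key alt_dest_col_name (assumed)
    skims = skim_dict.wrap('TAZ', alt_dest_col_name)
    # expressions prefixed with '@' can then refer to the wrapper as `skims`
    return {'skims': skims}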
def cdap_indiv_spec():
    """
    spec to compute the activity utilities for each individual hh member
    with no interactions with other household members taken into account
    """
    return simulate.read_model_spec(file_name='cdap_indiv_and_hhsize1.csv')
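# A minimal usage sketch (hedged): inside a CDAP model step the spec would be loaded and
# then evaluated against the persons table; only the loading call is shown here.
def _example_load_cdap_indiv_spec():
    # returns a DataFrame of utility expressions, typically with one coefficient
    # column per daily activity pattern alternative (an assumption about the spec layout)
    return cdap_indiv_spec()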
def atwork_subtour_scheduling(
        tours,
        persons_merged,
        tdd_alts,
        skim_dict,
        chunk_size,
        trace_hh_id):
    """
    This model predicts the departure time and duration of each activity for at-work subtours.
    """
    trace_label = 'atwork_subtour_scheduling'
    model_settings = config.read_model_settings('tour_scheduling_atwork.yaml')
    model_spec = simulate.read_model_spec(file_name='tour_scheduling_atwork.csv')

    persons_merged = persons_merged.to_frame()

    tours = tours.to_frame()
    subtours = tours[tours.tour_category == 'atwork']

    # - if no atwork subtours
    if subtours.shape[0] == 0:
        tracing.no_results(trace_label)
        return

    logger.info("Running %s with %d tours", trace_label, len(subtours))

    # preprocessor
    constants = config.get_model_constants(model_settings)
    od_skim_wrapper = skim_dict.wrap('origin', 'destination')
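# config.get_model_constants above pulls the shared CONSTANTS block out of the loaded
# model settings. A hypothetical illustration (keys and values invented, not the real
# tour_scheduling_atwork.yaml contents):
_example_settings = {'CONSTANTS': {'shadow_price_factor': 1.0}}
_example_constants = config.get_model_constants(_example_settings)  # -> {'shadow_price_factor': 1.0}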
def joint_tour_scheduling(
        tours,
        persons_merged,
        tdd_alts,
        chunk_size,
        trace_hh_id):
    """
    This model predicts the departure time and duration of each joint tour
    """
    trace_label = 'joint_tour_scheduling'
    model_settings = config.read_model_settings('joint_tour_scheduling.yaml')
    model_spec = simulate.read_model_spec(file_name='tour_scheduling_joint.csv')

    tours = tours.to_frame()
    joint_tours = tours[tours.tour_category == 'joint']

    # - if no joint tours
    if joint_tours.shape[0] == 0:
        tracing.no_results(trace_label)
        return

    # use inject.get_table as this won't exist if there are no joint_tours
    joint_tour_participants = inject.get_table('joint_tour_participants').to_frame()

    persons_merged = persons_merged.to_frame()

    logger.info("Running %s with %d joint tours", trace_label, joint_tours.shape[0])