Merge pull request #481 from SANDAG/develop
bstabler committed Oct 20, 2021
2 parents 94ed92e + fea3db5 commit 34348ed
Showing 182 changed files with 645,122 additions and 1,655 deletions.
2 changes: 2 additions & 0 deletions .travis.yml
@@ -35,6 +35,8 @@ jobs:
env: TEST_SUITE=activitysim/examples/example_psrc/test
- name: "SANDAG Example"
env: TEST_SUITE=activitysim/examples/example_sandag/test
- name: "SANDAG Cross-Border Example"
env: TEST_SUITE=activitysim/examples/example_sandag_xborder/test

- stage: Estimation Mode
name: "Larch Test"
2 changes: 2 additions & 0 deletions activitysim/abm/models/__init__.py
@@ -25,6 +25,8 @@
from . import parking_location_choice
from . import stop_frequency
from . import tour_mode_choice
from . import tour_od_choice
from . import tour_scheduling_probabilistic
from . import trip_destination
from . import trip_mode_choice
from . import trip_purpose
1 change: 1 addition & 0 deletions activitysim/abm/models/atwork_subtour_mode_choice.py
@@ -151,6 +151,7 @@ def atwork_subtour_mode_choice(

if dest_col not in choices_df:
choices_df[dest_col] = np.nan if pd.api.types.is_numeric_dtype(skim_cache[c]) else ''

choices_df[dest_col].where(choices_df.tour_mode != mode, skim_cache[c], inplace=True)

if estimator:
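
The hunk above makes the column's default match the skim dtype (NaN for numeric skims, empty string otherwise) before Series.where overwrites the rows for the current mode. A minimal sketch of that pattern with made-up data (the column name, modes, and values are hypothetical; a plain Series stands in for skim_cache[c]):

import numpy as np
import pandas as pd

choices_df = pd.DataFrame({'tour_mode': ['WALK', 'DRIVE_ALONE', 'WALK']})
skim_values = pd.Series([1.5, 2.0, 3.5])   # stand-in for skim_cache[c]
mode, dest_col = 'WALK', 'od_dist'

# initialize with a dtype-appropriate default so the column type stays consistent
if dest_col not in choices_df:
    choices_df[dest_col] = np.nan if pd.api.types.is_numeric_dtype(skim_values) else ''

# where() keeps existing values where tour_mode differs from the current mode and
# substitutes skim_values elsewhere, i.e. only the matching rows get overwritten
choices_df[dest_col].where(choices_df.tour_mode != mode, skim_values, inplace=True)
print(choices_df)
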
6 changes: 3 additions & 3 deletions activitysim/abm/models/initialize_los.py
@@ -112,7 +112,7 @@ def initialize_los(network_los):
np.copyto(data, np.nan)


def compute_utilities_for_atttribute_tuple(network_los, scalar_attributes, data, chunk_size, trace_label):
def compute_utilities_for_attribute_tuple(network_los, scalar_attributes, data, chunk_size, trace_label):

# scalar_attributes is a dict of attribute name/value pairs for this combination
# (e.g. {'demographic_segment': 0, 'tod': 'AM', 'access_mode': 'walk'})
@@ -142,7 +142,6 @@ def compute_utilities_for_atttribute_tuple(network_los, scalar_attributes, data,

for i, chooser_chunk, chunk_trace_label \
in chunk.adaptive_chunked_choosers(choosers_df, chunk_size, trace_label, chunk_tag=chunk_tag):

# we should count choosers_df as chunk overhead since its pretty big and was custom made for compute_utilities
assert chooser_chunk._is_view # otherwise copying it is wasteful
chooser_chunk = chooser_chunk.copy()
@@ -230,7 +229,7 @@ def initialize_tvpb(network_los, attribute_combinations, chunk_size):
offset = network_los.tvpb.uid_calculator.get_skim_offset(scalar_attributes)
tuple_trace_label = tracing.extend_trace_label(trace_label, f'offset{offset}')

compute_utilities_for_atttribute_tuple(network_los, scalar_attributes, data, chunk_size, tuple_trace_label)
compute_utilities_for_attribute_tuple(network_los, scalar_attributes, data, chunk_size, tuple_trace_label)

# make sure we populated the entire offset
assert not any_uninitialized(data.reshape(uid_calculator.skim_shape)[offset], lock)
@@ -246,6 +245,7 @@ def initialize_tvpb(network_los, attribute_combinations, chunk_size):
# (the other processes don't have to wait, since we were sliced by attribute combination
# and they must wait to coalesce at the end of the multiprocessing_step)
# FIXME testing entire array is costly in terms of RAM)

while any_uninitialized(data, lock):
logger.debug(f"{trace_label}.{multiprocessing.current_process().name} waiting for other processes"
f" to populate {num_uninitialized(data, lock)} uninitialized data values")
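
The np.copyto(data, np.nan) above, together with the waiting loop at the end of this file's hunks, treats NaN as an "uninitialized" sentinel in the shared skim buffer, so a process can tell when the other processes have filled their slices. A minimal sketch of that idea under assumed semantics (num_uninitialized and any_uninitialized are reimplemented here for illustration and may differ from the real helpers):

import multiprocessing
import numpy as np

def num_uninitialized(data, lock):
    # count cells still holding the NaN sentinel, reading under the buffer's lock
    with lock:
        return int(np.isnan(data).sum())

def any_uninitialized(data, lock):
    return num_uninitialized(data, lock) > 0

lock = multiprocessing.Lock()
data = np.empty(10)
np.copyto(data, np.nan)                 # mark every cell as not yet computed
data[:5] = 1.0                          # this process fills its slice of the buffer
print(num_uninitialized(data, lock))    # 5 cells still waiting on other processes

# the step then polls until the buffer is complete, e.g.
# while any_uninitialized(data, lock):
#     time.sleep(poll_interval)
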
6 changes: 5 additions & 1 deletion activitysim/abm/models/initialize_tours.py
@@ -94,7 +94,11 @@ def initialize_tours(network_los, households, persons, trace_hh_id):
model_settings=model_settings.get('annotate_tours'),
trace_label=tracing.extend_trace_label(trace_label, 'annotate_tours'))

tours = patch_tour_ids(tours)
skip_patch_tour_ids = model_settings.get('skip_patch_tour_ids', False)
if skip_patch_tour_ids:
pass
else:
tours = patch_tour_ids(tours)
assert tours.index.name == 'tour_id'

# replace table function with dataframe
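
The new skip_patch_tour_ids option is read with a False default, so existing configurations keep patching tour ids unless a settings file opts out. A minimal sketch of the lookup (the settings dict, the override value, and the stub helper below are hypothetical; only the key name and its default come from the diff):

import pandas as pd

def patch_tour_ids(tours):                       # stand-in for the real helper
    return tours.rename_axis('tour_id')

tours = pd.DataFrame({'person_id': [1, 2]})
model_settings = {'skip_patch_tour_ids': True}   # would normally come from the step's settings file

if not model_settings.get('skip_patch_tour_ids', False):
    tours = patch_tour_ids(tours)                # default behaviour: patch tour ids
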
98 changes: 4 additions & 94 deletions activitysim/abm/models/stop_frequency.py
@@ -12,104 +12,14 @@
from activitysim.core import inject
from activitysim.core import expressions

from activitysim.abm.models.util.canonical_ids import set_trip_index

from activitysim.core.util import assign_in_place
from activitysim.core.util import reindex

from .util import estimation

from .util import estimation, trip

logger = logging.getLogger(__name__)


@inject.injectable()
def stop_frequency_alts():
# alt file for building trips even though simulation is simple_simulate not interaction_simulate
file_path = config.config_file_path('stop_frequency_alternatives.csv')
df = pd.read_csv(file_path, comment='#')
df.set_index('alt', inplace=True)
return df


def process_trips(tours, stop_frequency_alts):

OUTBOUND_ALT = 'out'
assert OUTBOUND_ALT in stop_frequency_alts.columns

# get the actual alternatives for each person - have to go back to the
# stop_frequency_alts dataframe to get this - the stop_frequency choice
# column has the index values for the chosen alternative

trips = stop_frequency_alts.loc[tours.stop_frequency]

# assign tour ids to the index
trips.index = tours.index

"""
::
tours.stop_frequency => proto trips table
________________________________________________________
stop_frequency | out in
tour_id | tour_id
954910 1out_1in | 954910 1 1
985824 0out_1in | 985824 0 1
"""

# reformat with the columns given below
trips = trips.stack().reset_index()
trips.columns = ['tour_id', 'direction', 'trip_count']

# tours legs have one more leg than stop
trips.trip_count += 1

# prefer direction as boolean
trips['outbound'] = trips.direction == OUTBOUND_ALT

"""
tour_id direction trip_count outbound
0 954910 out 2 True
1 954910 in 2 False
2 985824 out 1 True
3 985824 in 2 False
"""

# now do a repeat and a take, so if you have two trips of given type you
# now have two rows, and zero trips yields zero rows
trips = trips.take(np.repeat(trips.index.values, trips.trip_count.values))
trips = trips.reset_index(drop=True)

grouped = trips.groupby(['tour_id', 'outbound'])
trips['trip_num'] = grouped.cumcount() + 1

trips['person_id'] = reindex(tours.person_id, trips.tour_id)
trips['household_id'] = reindex(tours.household_id, trips.tour_id)

trips['primary_purpose'] = reindex(tours.primary_purpose, trips.tour_id)

# reorder columns and drop 'direction'
trips = trips[['person_id', 'household_id', 'tour_id', 'primary_purpose',
'trip_num', 'outbound', 'trip_count']]

"""
person_id household_id tour_id primary_purpose trip_num outbound trip_count
0 32927 32927 954910 work 1 True 2
1 32927 32927 954910 work 2 True 2
2 32927 32927 954910 work 1 False 2
3 32927 32927 954910 work 2 False 2
4 33993 33993 985824 univ 1 True 1
5 33993 33993 985824 univ 1 False 2
6 33993 33993 985824 univ 2 False 2
"""

set_trip_index(trips)

return trips


@inject.step()
def stop_frequency(
tours, tours_merged,
@@ -256,14 +166,14 @@ def stop_frequency(
# FIXME should have added this when tours created?
assert 'primary_purpose' not in tours
if 'primary_purpose' not in tours.columns:
# if not already there, then it will have been added by annotate tours preprocessor
# if not already there, then it will have been added by stop_freq_annotate_tours_preprocessor
assign_in_place(tours, tours_merged[['primary_purpose']])

pipeline.replace_table("tours", tours)

# create trips table
trips = process_trips(tours, stop_frequency_alts)
trips = pipeline.extend_table("trips", trips)
trips = trip.initialize_from_tours(tours, stop_frequency_alts)
pipeline.replace_table("trips", trips)
tracing.register_traceable_table('trips', trips)
pipeline.get_rn_generator().add_channel('trips', trips)

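
This commit drops the local process_trips above in favor of trip.initialize_from_tours, and switches the pipeline call from extend_table to replace_table. The stack-and-repeat logic being moved is easiest to follow end to end; the worked sketch below uses hypothetical tour ids and stop frequencies and mirrors the deleted code rather than the new helper, which may differ in details:

import numpy as np
import pandas as pd

stop_frequency_alts = pd.DataFrame(
    {'out': [0, 1], 'in': [1, 1]},
    index=pd.Index(['0out_1in', '1out_1in'], name='alt'))
tours = pd.DataFrame(
    {'stop_frequency': ['1out_1in', '0out_1in']},
    index=pd.Index([954910, 985824], name='tour_id'))

# look up the chosen alternative's out/in stop counts for each tour
trips = stop_frequency_alts.loc[tours.stop_frequency]
trips.index = tours.index

# long format: one row per (tour, direction); legs = stops + 1
trips = trips.stack().reset_index()
trips.columns = ['tour_id', 'direction', 'trip_count']
trips.trip_count += 1
trips['outbound'] = trips.direction == 'out'

# repeat each row trip_count times so every leg gets its own row, then number the legs
trips = trips.take(np.repeat(trips.index.values, trips.trip_count.values))
trips = trips.reset_index(drop=True)
trips['trip_num'] = trips.groupby(['tour_id', 'outbound']).cumcount() + 1
print(trips)
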
(diff for the remaining changed files not shown)
