Adding a Vision2050 EIS long list summary
Ennazus committed Aug 9, 2018
1 parent 556c196 commit 9cc7f6e
Showing 1 changed file with 289 additions and 0 deletions.
289 changes: 289 additions & 0 deletions scripts/summarize/notebooks/Vision 2050 EIS - long list.ipynb
@@ -0,0 +1,289 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import pandas as pd\n",
"import os\n",
"import collections"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"mode_dict = {0: 'Other', 1:'Walk',2:'Bike',3:'SOV',4:'HOV2',5:'HOV3+',6:'Transit',8:'School Bus'}\n",
"annualization_factor = 300"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Relative path between notebooks and goruped output directories\n",
"relative_path = '../../../outputs'\n",
"output_vision_file = os.path.join(relative_path, 'Vision2050_longlist.csv')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Load network results\n",
"network_df = pd.read_excel(os.path.join(relative_path,'network/') + r'network_summary_detailed.xlsx',\n",
" sheetname='Network Summary')"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Load Daysim results\n",
"trip = pd.read_csv(os.path.join(relative_path,r'daysim/') + r'_trip.tsv', sep='\\t')\n",
"person = pd.read_csv(os.path.join(relative_path,r'daysim/') + r'_person.tsv', sep='\\t')"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Average HBW/ Non-HBW Trip Distance\n",
"trip['Trip Type'] = 'Not Home-Based Work'\n",
"trip.ix[(((trip['opurp']==0) & (trip['dpurp']==1)) | ((trip['opurp']==1) & (trip['dpurp']==0))),'Trip Type']= 'Home-Based Work'\n",
"hbw_trips = trip.loc[trip['Trip Type']=='Home-Based Work']\n",
"nhbw_trips = trip.loc[trip['Trip Type']=='Not Home-Based Work']\n",
"\n",
"output_dict = {'Average Commute Trip Length':hbw_trips['travdist'].mean()}\n",
"output_dict = collections.OrderedDict(output_dict)\n",
"output_dict['Average Other Trip Length']=nhbw_trips['travdist'].mean()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# VMT per person per day\n",
"network_df['all_facility_vmt'] = network_df['arterial_vmt']+ network_df['connectors_vmt']+ network_df['highway_vmt']\n",
"total_vmt = network_df['all_facility_vmt'].sum()\n",
"total_persons = person.sum()['psexpfac']\n",
"vmt_per_person = total_vmt/total_persons\n",
"output_dict['Daily VMT per person']=vmt_per_person"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# VHT per person per day\n",
"network_df['all_facility_vht'] = network_df['arterial_vht']+ network_df['connectors_vht']+ network_df['highway_vht']\n",
"total_vht = network_df['all_facility_vht'].sum()\n",
"vht_per_person = total_vht/total_persons\n",
"output_dict['Daily VHT per person']= vht_per_person"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Delay per person annually\n",
"network_df['all_facility_delay'] = network_df['arterial_delay']+ network_df['connectors_delay']+ network_df['highway_delay']\n",
"total_delay = network_df['all_facility_delay'].sum()\n",
"delay_per_person = (total_delay/total_persons)*annualization_factor\n",
"output_dict['Annual Delay Hours per Person']= vht_per_person"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Total Delay Hours Daily by Facility Type\n",
"df_fac = pd.DataFrame(network_df.sum()[['arterial_delay','highway_delay']])\n",
"df_fac = df_fac.reset_index()\n",
"df_fac.columns = ['Facility Type', 'Delay']\n",
"#df_fac.index = df_fac['Facility Type']\n",
"#df_fac.drop('Facility Type', axis=1, inplace=True)\n",
"df_fac.loc['Total'] = df_fac.sum()\n",
"output_dict['Daily Arterial Delay Hours'] = df_fac['Delay'].loc[df_fac['Facility Type'] == 'arterial_delay'].values[0]\n",
"output_dict['Daily Highway Delay Hours'] = df_fac['Delay'].loc[df_fac['Facility Type'] == 'highway_delay'].values[0]"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Daily Transit Boardings\n",
"transit_df = pd.read_excel(r'../../../outputs/network/network_summary_detailed.xlsx', sheetname='Transit Summaries')\n",
"tod_list = ['5to6','6to7','7to8','8to9','9to10','10to14','14to15','15to16','16to17','17to18','18to20']\n",
"transit_df = transit_df[[tod+'_board' for tod in tod_list]+['route_code']]\n",
"transit_df = transit_df.fillna(0)\n",
"transit_df['line_total'] = transit_df[[tod+'_board' for tod in tod_list]].sum(axis=1)\n",
"output_dict['Daily Transit Boardings']=transit_df['line_total'].sum()\n",
"output_df = pd.DataFrame(output_dict.items(), columns = ['Data Item', 'Value'])\n",
"output_df['Group'] = 'Total'"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# All Trip Mode Share\n",
"model_df = trip[['mode','trexpfac']].groupby('mode').sum()[['trexpfac']]/trip[['trexpfac']].sum()\n",
"model_df = model_df.reset_index()\n",
"\n",
"model_df.replace({'mode':mode_dict}, inplace=True)\n",
"model_df.columns = ['Data Item', 'Value']\n",
"model_df['Group'] = 'All Trip Mode Share'\n",
"\n",
"output_df =pd.concat([output_df, model_df])"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Commute Trip Mode Share\n",
"model_df_hbw = hbw_trips[['mode','trexpfac']].groupby('mode').sum()[['trexpfac']]/trip[['trexpfac']].sum()\n",
"model_df_hbw = model_df_hbw.reset_index()\n",
"model_df_hbw.replace({'mode':mode_dict}, inplace=True)\n",
"model_df_hbw.columns = ['Data Item', 'Value']\n",
"model_df_hbw['Group'] = 'Commute Trip Mode Share'\n",
"\n",
"output_df =pd.concat([output_df, model_df_hbw])"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Non-Commute Trip Mode Share\n",
"model_df_nhbw = nhbw_trips[['mode','trexpfac']].groupby(['mode']).sum()[['trexpfac']]/nhbw_trips[['trexpfac']].sum()\n",
"model_df_nhbw.reset_index(inplace=True)\n",
"model_df_nhbw.replace({'mode':mode_dict}, inplace=True)\n",
"model_df_nhbw.columns = ['Data Item', 'Value']\n",
"model_df_nhbw['Group'] = 'Non-Commute Trip Mode Share'\n",
"\n",
"output_df =pd.concat([output_df, model_df_nhbw])\n"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Number and Percent of People Walking, Biking, or Transiting\n",
"trip_person = pd.merge(trip,person,on=['hhno','pno'], how='left')\n",
"bike_walk_t_trips = trip_person[trip_person['mode'].isin([1,2,6])]\n",
"\n",
"df = bike_walk_t_trips.groupby(['hhno','pno']).count()\n",
"df = df.reset_index()\n",
"df = df[['hhno','pno']]\n",
"df['bike_walk_t'] = True\n",
"\n",
"df = pd.merge(person,df,on=['hhno','pno'], how='left')\n",
"df['bike_walk_t'] = df['bike_walk_t'].fillna(False)\n",
"\n",
"df_share = pd.DataFrame(df.groupby('bike_walk_t').sum()['psexpfac']/df['psexpfac'].sum())\n",
"df_total = pd.DataFrame(df.groupby('bike_walk_t').sum()['psexpfac'])\n",
"bike_walk_dict= {'Share of People Walking, Biking, and Using Transit':df_share.loc[True]['psexpfac'], \n",
" 'Total People Walking, Biking, and Using Transit':df_total.loc[True]['psexpfac']}\n",
"bike_walk_df = pd.DataFrame(bike_walk_dict.items(), columns = ['Data Item', 'Value'])\n",
"\n",
"bike_walk_df['Group'] = 'Total'\n",
"output_df =pd.concat([output_df, bike_walk_df])\n"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"output_df.to_csv(output_vision_file, index= False)"
]
}
],
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
