Skip to content

Commit

Permalink
completes costgraph parameter refactor for config file
Browse files Browse the repository at this point in the history
  • Loading branch information
rjhanes committed Sep 9, 2021
1 parent 2e9bedc commit 7290b0a
Showing 1 changed file with 65 additions and 76 deletions.
141 changes: 65 additions & 76 deletions celavi/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,20 +52,16 @@
subfolder_dict = {}
# input data folder for pre-processed route data
subfolder_dict['preprocessing_output_folder'] = os.path.join(args.data,
data_dirs.get('preprocessing_output_folder',
'preprocessing/'))
data_dirs.get('preprocessing_output_folder'))
# input data folder for LCI
subfolder_dict['lci_folder'] = os.path.join(args.data,
data_dirs.get('lci_folder',
'pylca_celavi_data'))
data_dirs.get('lci_folder'))
# output folder for CELAVI results
subfolder_dict['outputs_folder'] = os.path.join(args.data,
data_dirs.get('outputs_folder',
'outputs'))
data_dirs.get('outputs_folder'))
# output folder for intermediate routing data
subfolder_dict['routing_output_folder'] = os.path.join(args.data,
data_dirs.get('routing_output_folder',
'preprocessing/routing_intermediate_files/'))
data_dirs.get('routing_output_folder'))

# check if directories exist, if not, create them
for folder in subfolder_dict.values():
Expand All @@ -77,74 +73,63 @@
# TODO: add check to ensure files exist
# general inputs
locations_computed_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
inputs.get('locs',
'locations_computed.csv'))
data_dirs.get('inputs'),
inputs.get('locs'))
step_costs_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'step_costs.csv')
data_dirs.get('inputs'),
inputs.get('step_costs'))
fac_edges_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'fac_edges.csv')
data_dirs.get('inputs'),
inputs.get('fac_edges'))
transpo_edges_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'transpo_edges.csv')
data_dirs.get('inputs'),
inputs.get('transpo_edges'))
route_pair_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'route_pairs.csv')
data_dirs.get('inputs'),
inputs.get('route_pairs'))
avg_blade_masses_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'avgblademass.csv')
data_dirs.get('inputs'),
inputs.get('avgblademass'))
routes_custom_filename = os.path.join(args.data,
data_dirs.get('main',
'preprocessing_output/'),
'routes.csv')
data_dirs.get('inputs'),
inputs.get('routes_custom'))
routes_computed_filename = os.path.join(args.data,
data_dirs.get('main',
'preprocessing_output/'),
'routes_computed.csv')
data_dirs.get('preprocessing_outputs'),
inputs.get('routes_computed'))

# input file paths for precomputed US road network data
# transport graph (pre-computed; don't change)
transportation_graph_filename = os.path.join(args.data,
data_dirs.get('main',
'inputs/'),
'precomputed_us_road_network',
'transportation_graph.csv')
data_dirs.get('us_roads'),
inputs.get('transportation_graph'))

# node locations for transport graph (pre-computed; don't change)
node_locations_filename = os.path.join(args.data, 'inputs',
'precomputed_us_road_network',
'node_locations.csv')
node_locations_filename = os.path.join(args.data,
data_dirs.get('us_roads'),
inputs.get('node_locs'))

# file paths for raw data used to compute locations
wind_turbine_locations_filename = os.path.join(args.data, 'inputs',
'raw_location_data',
'uswtdb_v4_1_20210721.csv')
wind_turbine_locations_filename = os.path.join(args.data,
data_dirs.get('raw_locations'),
inputs.get('power_plant_locs'))
# LMOP data for landfill locations
landfill_locations_filename = os.path.join(args.data, 'inputs',
'raw_location_data',
'landfilllmopdata.csv')
landfill_locations_filename = os.path.join(args.data,
data_dirs.get('raw_locations'),
inputs.get('landfill_locs'))
# other facility locations (e.g., cement)
other_facility_locations_filename = os.path.join(args.data, 'inputs',
'raw_location_data',
'other_facility_locations_all_us.csv')
other_facility_locations_filename = os.path.join(args.data,
data_dirs.get('raw_locations'),
inputs.get('other_facility_locs'))

lookup_facility_type_filename = os.path.join(args.data, 'lookup_tables',
'facility_type.csv')
lookup_facility_type_filename = os.path.join(args.data,
data_dirs.get('lookup_tables'),
inputs.get('lookup_facility_type'))

turbine_data_filename = os.path.join(args.data, 'inputs', 'number_of_turbines.csv')
turbine_data_filename = os.path.join(args.data,
data_dirs.get('inputs'),
inputs.get('turbine_data'))


if args.list == ['US']:
print('National Scale Run')
data_filtering_choice = False
# Data filtering for states
data_filtering_choice = data_filtering.get('enable_data_filtering', False)
if data_filtering_choice:
Expand All @@ -156,9 +141,13 @@



# Pickle file containing CostGraph object
costgraph_pickle_filename = os.path.join(args.data, 'inputs', 'netw.obj')
costgraph_csv_filename = os.path.join(args.data, 'inputs', 'netw.csv')
# Pickle and CSV files for initialized CostGraph object
costgraph_pickle_filename = os.path.join(args.data,
data_dirs.get('inputs'),
outputs.get('costgraph_pickle'))
costgraph_csv_filename = os.path.join(args.data,
data_dirs.get('outputs'),
outputs.get('costgraph_csv'))

# Because the LCIA code has filenames hardcoded and cannot be reconfigured,
# change the working directory to the lci_folder to accommodate those read
Expand Down Expand Up @@ -192,9 +181,9 @@
preprocessing_output_folder=subfolder_dict['preprocessing_output_folder'])

if use_computed_routes:
args.routes = routes_computed_filename
routes = routes_computed_filename
else:
args.routes = routes_custom_filename
routes = routes_custom_filename

avgblade = pd.read_csv(avg_blade_masses_filename)

Expand All @@ -209,24 +198,24 @@
fac_edges_file=fac_edges_filename,
transpo_edges_file=transpo_edges_filename,
locations_file=locations_computed_filename,
routes_file=args.routes,
sc_begin= 'manufacturing',
sc_end=['landfilling', 'cement co-processing', 'blade next use'],
year=2000.0,
max_dist=300.0,
verbose=1,
save_copy=True,
routes_file=routes,
sc_begin=cg_params.get('sc_begin'),
sc_end=cg_params.get('sc_end'),
year=scenario_params.get('start_year'),
max_dist=scenario_params.get('max_dist'),
verbose=cg_params.get('cg_verbose'),
save_copy=cg_params.get('save_cg_csv'),
save_name=costgraph_csv_filename,
blade_mass=avgblade.loc[avgblade.year==2000,
blade_mass=avgblade.loc[avgblade.year==cg_params.get('start_year'),
'Glass Fiber:Blade'].values[0],
finegrind_cumul_initial=1.0,
coarsegrind_cumul_initial=1.0,
finegrind_initial_cost=165.38,
finegrind_revenue=242.56,
coarsegrind_initial_cost=121.28,
finegrind_learnrate=-0.05,
coarsegrind_learnrate=-0.05,
finegrind_material_loss=0.3,
finegrind_cumul_initial=cg_params.get('finegrind_cumul_initial'),
coarsegrind_cumul_initial=cg_params.get('coarsegrind_cumul_initial'),
finegrind_initial_cost=cg_params.get('finegrind_initial_cost'),
finegrind_revenue=cg_params.get('finegrind_revenue'),
coarsegrind_initial_cost=cg_params.get('coarsegrind_initial_cost'),
finegrind_learnrate=cg_params.get('finegrind_learnrate'),
coarsegrind_learnrate=cg_params.get('coarsegrind_learnrate'),
finegrind_material_loss=cg_params.get('finegrind_material_loss'),
)
print('CostGraph initialized at %d s' % np.round(time.time() - time0, 1),
flush=True)
Expand Down

0 comments on commit 7290b0a

Please sign in to comment.