diff --git a/news/gtcadis.rst b/news/gtcadis.rst
index c0b29eac53..9c039ce55d 100644
--- a/news/gtcadis.rst
+++ b/news/gtcadis.rst
@@ -3,7 +3,7 @@
 * added gtcadis.py script
 * first step for the GT-CADIS workflow, further steps to follow
 
-**Changed:** None
+**Changed:** alara.py
 
 **Deprecated:** None
 
diff --git a/pyne/alara.py b/pyne/alara.py
index c3f0e33ee4..704954c136 100644
--- a/pyne/alara.py
+++ b/pyne/alara.py
@@ -1,10 +1,13 @@
 """This module contains functions relevant to the ALARA activation code and
 the Chebyshev Rational Approximation Method
 """
 from __future__ import print_function
+import subprocess
+import shutil
 import os
 import collections
 from warnings import warn
 from pyne.utils import QAWarning, to_sec
+from string import Template
 
 import numpy as np
 import tables as tb
@@ -904,5 +907,333 @@ def _find_phsrc_dc(idc, phtn_src_dc):
     # if idc doesn't match any string in phtn_src_dc list, raise an error.
     raise ValueError('Decay time {0} not found in phtn_src file'.format(idc))
 
+
+def _gt_write_matlib(mats):
+    """
+    Function that writes the ALARA matlib file.
+
+    Parameters
+    ----------
+    mats : list
+        List of tuples of (material name, PyNE Material object)
+
+    Returns
+    -------
+    matlib : str
+        Formatted ALARA matlib
+    """
+    matlib = ""
+    for mat in mats:
+        matlib += mat[1].alara()
+        matlib += "\n"
+    return matlib
+
+
+def _gt_write_fluxin(fluxes, num_n_groups, num_mats):
+    """
+    Function that writes the ALARA fluxin file.
+
+    Parameters
+    ----------
+    fluxes : numpy array
+        2D array of flux values with shape (num_n_groups + 2, num_n_groups)
+    num_n_groups : int
+        Number of neutron energy groups
+    num_mats : int
+        Number of materials in the ALARA input, equal to the number of
+        materials in the problem geometry
+
+    Returns
+    -------
+    fluxin : str
+        Formatted ALARA fluxin
+    """
+    # Order flux groups from high to low for the ALARA fluxin file
+    fluxes_reversed = np.fliplr(fluxes)
+    num_decimals = 6
+    num_entries_per_line = 7
+    fluxin = ""
+    for m in range(num_mats):
+        for n in range(num_n_groups + 2):
+            fluxin += _gt_format(fluxes_reversed[n, :], num_decimals, num_entries_per_line)
+            fluxin += '\n'
+    return fluxin
+
+
+def _gt_format(vector, decimals, columns):
+    """
+    Function that formats a vector of values into a string for an ALARA input file.
+
+    Parameters
+    ----------
+    vector : numpy array
+        1D array of values to be formatted into a string
+    decimals : int
+        Number of decimal places in the formatted numbers
+    columns : int
+        Number of entries per line
+
+    Returns
+    -------
+    string : str
+        Formatted string of vector values
+    """
+    string = ""
+    for i, v in enumerate(vector):
+        string += "{0:.{1}E} ".format(v, decimals)
+        if (i + 1) % columns == 0:
+            string += '\n'
+    return string
+
+
+def _gt_write_inp(run_dir, data_dir, mats, num_mats, num_n_groups, flux_norm, irr_time, decay_times,
+                  input_file, matlib_file, fluxin_file, phtn_src_file, num_p_groups, p_bins):
+    """
+    Function that writes the ALARA input file.
+
+    Parameters
+    ----------
+    run_dir : str
+        Path to write ALARA input and output files
+    data_dir : str
+        Path to directory containing nuclib and fendl files
+    mats : list
+        List of tuples of (material name, PyNE Material object)
+    num_mats : int
+        Number of materials in mats
+    num_n_groups : int
+        Number of neutron energy groups
+    flux_norm : float
+        Neutron flux normalization
+    irr_time : float
+        Irradiation time [s]
+    decay_times : list
+        Decay times [s]
+    input_file : str
+        Path to ALARA input file
+    matlib_file : str
+        Path to ALARA matlib file
+    fluxin_file : str
+        Path to ALARA fluxin file
+    phtn_src_file : str
+        Path to write the ALARA output photon source
+    num_p_groups : int
+        Number of photon energy groups for the ALARA calculation
+    p_bins : numpy array
+        Photon energy bin bounds
+
+    Returns
+    -------
+    inp : str
+        Formatted ALARA input
+    """
+    inp = Template("""
+geometry rectangular
+
+volume
+$zone
+end
+
+mat_loading
+$zone_mat
+end
+
+$mix
+$flux
+output zone
+
+integrate_energy
+$p_groups
+end
+
+pulsehistory my_schedule
+ 1 0.0 s
+end
+
+schedule total
+$irr
+end
+
+cooling
+$dt
+end
+
+material_lib $matlib_file
+element_lib $data_dir/nuclib
+data_library alaralib $data_dir/fendl2.0bin
+truncation 1e-7
+impurity 5e-6 1e-3
+dump_file $run_dir/dump_file
+    """)
+
+    # Zones input and material assignment
+    num_zones = num_mats * (num_n_groups + 2)
+    zone = ""
+    zone_mat = ""
+    for z in range(num_zones):
+        zone += " 1.0 zone_{0}\n".format(z)
+        mix_num = int(np.floor(z / float(num_n_groups + 2)))
+        zone_mat += " zone_{0} mix_{1}\n".format(z, mix_num)
+    # Material mixture input
+    mix = ""
+    for m, mat in enumerate(mats):
+        mix += "mixture mix_{0}\n".format(m)
+        mix += " material {0} 1 1\nend\n\n".format(mat[0])
+    # Flux input
+    flux = "flux flux_inp {0} {1} 0 default".format(fluxin_file, flux_norm)
+    # Photon energy bin structure
+    # Precision and number of values per line of the photon groups
+    decimals = 2
+    entries_per_line = 8
+    p_groups = "photon_source {0}/fendl2.0bin {1} {2}\n{3}".format(data_dir, phtn_src_file, num_p_groups,
+                                                                   _gt_format(p_bins, decimals, entries_per_line))
+    # Irradiation schedule input
+    irr = " {0} s flux_inp my_schedule 0 s".format(irr_time)
+    # Decay times input
+    dt = ""
+    for d in decay_times:
+        dt += " {0} s\n".format(d)
+
+    inp = inp.substitute(zone=zone, zone_mat=zone_mat, mix=mix, matlib_file=matlib_file, data_dir=data_dir,
+                         run_dir=run_dir, flux=flux, p_groups=p_groups, irr=irr, dt=dt)
+    return inp
+
+
+def _gt_alara(data_dir, mats, num_mats, neutron_spectrum, num_n_groups, irr_time, decay_times,
+              num_decay_times, p_bins, num_p_groups, run_dir):
+    """
+    Function that prepares the necessary input files and runs ALARA.
+
+    Parameters
+    ----------
+    data_dir : str
+        Path to directory containing nuclib and fendl files
+    mats : list
+        List of tuples of (material name, PyNE Material object)
+    num_mats : int
+        Number of materials in mats
+    neutron_spectrum : numpy array
+        Neutron energy group spectrum (length is equal to num_n_groups)
+    num_n_groups : int
+        Number of neutron energy groups
+    irr_time : float
+        Irradiation time [s]
+    decay_times : list
+        Decay times [s]
+    num_decay_times : int
+        Number of decay times
+    p_bins : numpy array
+        Photon energy bin bounds
+    num_p_groups : int
+        Number of photon energy groups for the ALARA calculation
+    run_dir : str
+        Path to write ALARA input and output files
+
+    Returns
+    -------
+    phtn_src_file : str
+        Path to the ALARA output photon source file
+    """
+    # Normalize the neutron energy spectrum for the ALARA fluxin file
+    flux_norm = np.linalg.norm(neutron_spectrum, ord=1)
+    if flux_norm > 0:
+        # Normalize neutron spectrum
+        n_spectrum = neutron_spectrum[:] / flux_norm
+
+    # Write matlib file
+    matlib_file = os.path.join(run_dir, "matlib")
+    with open(matlib_file, 'w') as f:
+        alara_matlib = _gt_write_matlib(mats)
+        f.write(alara_matlib)
+
+    # Write fluxin file
+    fluxin_file = os.path.join(run_dir, "fluxin")
+    fluxes = np.zeros((num_n_groups + 2, num_n_groups))
+    for n in range(num_n_groups):
+        fluxes[n, n] = n_spectrum[n]
+    # Add the total spectrum; leave the last row all zeros as a blank "zero" spectrum
+    fluxes[num_n_groups, :] = n_spectrum[:]
+    # The fluxin file repeats these flux blocks once per material in the ALARA input
+    with open(fluxin_file, 'w') as f:
+        alara_fluxin = _gt_write_fluxin(fluxes, num_n_groups, num_mats)
+        f.write(alara_fluxin)
+
+    # Write ALARA inp file
+    input_file = os.path.join(run_dir, "inp")
+    phtn_src_file = os.path.join(run_dir, "phtn_src")
+    # Number of zones in the ALARA input = number of materials * number of flux blocks per material
+    with open(input_file, 'w') as f:
+        alara_inp = _gt_write_inp(run_dir, data_dir, mats, num_mats, num_n_groups, flux_norm, irr_time,
+                                  decay_times, input_file, matlib_file, fluxin_file, phtn_src_file,
+                                  num_p_groups, p_bins)
+        f.write(alara_inp)
+
+    # Run ALARA
+    sub = subprocess.check_output(['alara', input_file], stderr=subprocess.STDOUT)
+    return phtn_src_file
+
+
+def calc_eta(data_dir, mats, num_mats, neutron_spectrum, num_n_groups, irr_time, decay_times, p_bins,
+             num_p_groups, run_dir):
+    """
+    Function that returns eta values (the SNILB check result) for each material/element
+    and each decay time.
+
+    Parameters
+    ----------
+    data_dir : str
+        Path to directory containing nuclib and fendl files
+    mats : list
+        List of tuples of (material name, PyNE Material object)
+    num_mats : int
+        Number of materials in mats
+    neutron_spectrum : numpy array
+        Neutron energy spectrum (length is equal to num_n_groups)
+    num_n_groups : int
+        Number of neutron energy groups
+    irr_time : float
+        Irradiation time [s]
+    decay_times : list
+        Decay times [s]
+    p_bins : numpy array
+        Photon energy bin bounds
+    num_p_groups : int
+        Number of photon energy groups for the ALARA calculation
+    run_dir : str
+        Path to write ALARA input and output files
+
+    Returns
+    -------
+    eta : numpy array
+        Eta value per photon group for each material listed.
+        This is a 3D array of shape (num_mats, num_decay_times, num_p_groups + 1).
+    phtn_src_file : str
+        Path to the ALARA-produced photon source file
+    """
+    num_decay_times = len(decay_times)
+
+    # Run ALARA
+    phtn_src_file = _gt_alara(data_dir, mats, num_mats, neutron_spectrum, num_n_groups, irr_time,
+                              decay_times, num_decay_times, p_bins, num_p_groups, run_dir)
+    # Parse ALARA output
+    # Create an array to store results from the phtn_src file
+    entries_per_material = (num_n_groups + 2) * num_decay_times
+    num_rows = num_mats * entries_per_material
+    # One column for each photon group and a last column for the total over all groups
+    num_columns = num_p_groups + 1
+    p_sources = np.zeros(shape=(num_rows, num_columns))
+    with open(phtn_src_file, 'r') as f:
+        # Row counter for the "TOTAL" blocks
+        i = 0
+        for line in f.readlines():
+            l = line.split()
+            if l[0] == "TOTAL" and l[1] != "shutdown":
+                row = np.array([float(x) for x in l[3:]])
+                p_sources[i, :-1] = row[:]
+                # Total emission intensity over all groups
+                p_sources[i, -1] = np.sum(row)
+                i += 1
+
+    # Calculate eta
+    eta = np.zeros(shape=(num_mats, num_decay_times, num_p_groups + 1))
+    # Split the results array "p_sources" by the number of materials
+    p_sources = np.split(p_sources, num_mats, axis=0)
+    for m in range(num_mats):
+        p_source = p_sources[m].reshape(num_n_groups + 2, num_decay_times, num_p_groups + 1)
+        eta[m, :, :] = (np.sum(p_source[:-2, :, :], axis=0) - p_source[-1, :, :] * num_n_groups) / \
+                       (p_source[-2, :, :] - p_source[-1, :, :])
+
+    eta[np.isnan(eta)] = 1.0
+    eta[np.isinf(eta)] = 1.0E10
+
+    return eta, phtn_src_file
+
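For reference, a minimal sketch of how the new pyne.alara.calc_eta entry point can be exercised on its own, mirroring what the step0 command of scripts/gtcadis.py (below) does. The geometry file, data directory, irradiation time, and decay times here are hypothetical placeholders, and the alara executable is assumed to be on the PATH:

    import os
    import numpy as np
    from pyne.material import MaterialLibrary
    from pyne.alara import calc_eta

    # Hypothetical inputs -- replace with problem-specific values.
    data_dir = "/path/to/nuclear_data"               # directory holding nuclib and fendl2.0bin
    mats = MaterialLibrary("geometry.h5").items()    # list of (name, Material) tuples
    num_n_groups = 175
    neutron_spectrum = 1.0E12 * np.ones(num_n_groups)  # flat spectrum [n/cm^2.s]
    # 24-group photon bin bounds [eV], matching _get_p_bins(24) in scripts/gtcadis.py
    p_bins = np.array([1.00E4, 2.00E4, 5.00E4, 1.00E5, 2.00E5, 3.00E5, 4.00E5, 6.00E5,
                       8.00E5, 1.00E6, 1.22E6, 1.44E6, 1.66E6, 2.00E6, 2.50E6, 3.00E6,
                       4.00E6, 5.00E6, 6.50E6, 8.00E6, 1.00E7, 1.20E7, 1.40E7, 2.00E7])

    run_dir = "eta_check"
    if not os.path.exists(run_dir):
        os.makedirs(run_dir)

    # eta has shape (num_mats, num_decay_times, num_p_groups + 1); the last column is the total.
    eta, phtn_src_file = calc_eta(data_dir, mats, len(mats), neutron_spectrum, num_n_groups,
                                  3.6E3, ['0', '3.6E3'], p_bins, 24, run_dir)
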
diff --git a/scripts/gtcadis.py b/scripts/gtcadis.py
index f7ffa66e09..e147e3c7db 100644
--- a/scripts/gtcadis.py
+++ b/scripts/gtcadis.py
@@ -1,33 +1,47 @@
 #!/usr/bin/env python
-import argparse
-import yaml
-import io
+import io
+import os
+import yaml
+import shutil
+import argparse
 import numpy as np
-from pyne.mesh import Mesh
-from pyne.partisn import write_partisn_input, isotropic_vol_source
-from pyne.dagmc import discretize_geom, load
+from sets import Set
 from pyne import nucname
+from pyne.mesh import Mesh, IMeshTag
 from pyne.bins import pointwise_collapse
+from pyne.material import Material, MaterialLibrary
+from pyne.partisn import write_partisn_input, isotropic_vol_source
+from pyne.dagmc import discretize_geom, load, cell_material_assignments
+from pyne.alara import calc_eta
 
 config_filename = 'config.yml'
 
 config = \
-    """
+"""
+general:
+    # If 'True', all intermediate files created while running the script will be
+    # deleted. Change to 'False' to retain all intermediate files.
+    clean: True
+    # Number of photon energy groups (24 or 42), default is 42.
+    p_groups: 42
+    # Number of neutron energy groups. Default is 175.
+    n_groups: 175
+
 # Optional step to assess all materials in geometry for compatibility with
-# SNILB criteria
-step0:
+# SNILB criteria.
+# step0: Information for the Step 0 calculation will be read from the Step 2 input
 
 # Prepare PARTISN input file for adjoint photon transport
 step1:
     # Path to hdf5 geometry file for photon transport
-    geom_file:
+    p_geom_file:
     # Volume ID of adjoint photon source cell on
     # DAGMC input [Trelis/Cubit .sat file]
-    src_cell:
+    src_cell:
     # Volume [cm^3] of source cell (detector)
-    src_vol:
+    src_vol:
     # Define uniformly spaced, rectagular mesh that covers entire geometry:
     # Define origin of the mesh (three entries, one per spatial dimension)
     # Define locations of the coarse meshes in each direction
@@ -35,16 +49,25 @@
     # Supported: Only one entry per _mesh and _ints for a uniformly
    # spaced mesh
    # Separate values with blank space.
-    origin:
-    xmesh:
-    xints:
-    ymesh:
-    yints:
-    zmesh:
-    zints:
+    origin:
+    xmesh:
+    xints:
+    ymesh:
+    yints:
+    zmesh:
+    zints:
 
 # Calculate T matrix for each material
 step2:
+    # Path to material-laden geometry (hdf5) file for adjoint neutron transport.
+    n_geom_file:
+    # Path to processed nuclear data.
+    # (directory containing nuclib, fendl2.0bin.lib, fendl2.0bin.gam)
+    data_dir:
+    # Single pulse irradiation time [s].
+    irr_time:
+    # Decay times of interest [s].
+    decay_times:
 
 # Calculate adjoint neutron source
 step3:
@@ -56,19 +79,17 @@
 # (biased source and weight windows)
 step5:
 
-    """
-
+"""
 
 def setup():
-    """ This function generates a blank config.yml file for the user to
+    """
+    This function generates a blank config.yml file for the user to
     fill in with problem specific values.
     """
     with open(config_filename, 'w') as f:
         f.write(config)
     print('File "{}" has been written'.format(config_filename))
-    print('Fill out the fields in this file then run ">> gtcadis.py step1"')
-
+    print('Fill out the fields in this file, then run ">> gtcadis.py step1" or the optional ">> gtcadis.py step0" first')
 
 def _names_dict():
     names = {'h1': 'h1', 'h2': 'd', 'h3': 'h3', 'he3': 'he3',
@@ -122,31 +143,148 @@ def _cards(source):
     }
     return cards
 
 
+def _get_p_bins(num_p_groups):
+    """
+    Function that returns the photon energy bin structure based on the number
+    of photon groups in the problem.
+
+    Parameters
+    ----------
+    num_p_groups : int
+        Number of photon energy groups
-def step1(cfg):
-    """ This function writes the PARTISN input file for the adjoint photon
-    transport
+
+    Returns
+    -------
+    p_E_group : numpy array
+        Array of photon energy bin bounds [eV]
+    """
+    # 24 bin structure [eV]
+    if num_p_groups == 24:
+        p_E_group = np.array([1.00E4, 2.00E4, 5.00E4, 1.00E5, 2.00E5, 3.00E5, 4.00E5, 6.00E5,
+                              8.00E5, 1.00E6, 1.22E6, 1.44E6, 1.66E6, 2.00E6, 2.50E6, 3.00E6,
+                              4.00E6, 5.00E6, 6.50E6, 8.00E6, 1.00E7, 1.20E7, 1.40E7, 2.00E7])
+    # 42 bin structure [eV]
+    elif num_p_groups == 42:
+        p_E_group = np.array([1.00E4, 2.00E4, 3.00E4, 4.50E4, 6.00E4, 7.00E4, 7.50E4, 1.00E5,
+                              1.50E5, 2.00E5, 3.00E5, 4.00E5, 4.50E5, 5.10E5, 5.12E5, 6.00E5,
+                              7.00E5, 8.00E5, 1.00E6, 1.33E6, 1.34E6, 1.50E6, 1.66E6, 2.00E6,
+                              2.50E6, 3.00E6, 3.50E6, 4.00E6, 4.50E6, 5.00E6, 5.50E6, 6.00E6,
+                              6.50E6, 7.00E6, 7.50E6, 8.00E6, 1.00E7, 1.20E7, 1.40E7, 2.00E7,
+                              3.00E7, 5.00E7])
+    return p_E_group
+
+
+def step0(cfg, cfg2):
+    """
+    This function performs the SNILB criteria check.
+
+    Parameters
+    ----------
+    cfg : dictionary
+        User input for 'general' from the config.yml file
+    cfg2 : dictionary
+        User input for Step 2 from the config.yml file
+    """
+    # Get user input from config file
+    clean = cfg['clean']
+    num_n_groups = cfg['n_groups']
+    num_p_groups = cfg['p_groups']
+    geom = cfg2['n_geom_file']
+    data_dir = cfg2['data_dir']
+    irr_time = cfg2['irr_time']
+    decay_times = str(cfg2['decay_times']).split(' ')
+
+    # Define a flat neutron spectrum with a magnitude of 1.0E12 [n/cm^2.s] in every group
+    group_flux_magnitude = 1.0E12
+    neutron_spectrum = group_flux_magnitude * np.ones(num_n_groups)
+
+    # Get materials from the geometry file
+    mat_lib = MaterialLibrary(geom)
+    mats = mat_lib.items()
+    num_mats = len(mats)
+
+    # Perform SNILB check and calculate eta for each material in the geometry
+    run_dir = 'step0/mats'
+    if not os.path.exists(run_dir):
+        os.makedirs(run_dir)
+    # Get the photon energy bin structure
+    p_bins = _get_p_bins(num_p_groups)
+    eta, psrc_file = calc_eta(data_dir, mats, num_mats, neutron_spectrum, num_n_groups, irr_time,
+                              decay_times, p_bins, num_p_groups, run_dir)
+    # Copy the phtn_src file to the main directory to be used for Step 2
+    shutil.copy(psrc_file, 'step0_phtn_src')
+
+    # Create a list of unique elements in the geometry
+    elements = Set([])
+    for mat in mats:
+        # Collapse elements in the material
+        mat_collapsed = mat[1].collapse_elements([])
+        element_list = mat_collapsed.comp.keys()
+        elements.update(element_list)
+    # Create a PyNE material library of single-element materials
+    element_lib = MaterialLibrary()
+    for element in elements:
+        mat_element = Material({element: 1.0})
+        mat_element_name = "mat:{}".format(nucname.name(element))
+        mat_element.metadata['name'] = mat_element_name
+        mat_element.density = 1.0
+        # Add the element to the material library
+        element_lib[mat_element_name] = mat_element
+    # Replace the element set with (name, Material) tuples from the library
+    elements = element_lib.items()
+    num_elements = len(elements)
+
+    # Perform SNILB check and calculate eta for the unique elements in the geometry
+    run_dir = 'step0/elements'
+    if not os.path.exists(run_dir):
+        os.makedirs(run_dir)
+    eta_element, psrc_file = calc_eta(data_dir, elements, num_elements, neutron_spectrum, num_n_groups,
+                                      irr_time, decay_times, p_bins, num_p_groups, run_dir)
+    np.set_printoptions(threshold=np.nan)
+
+    # Save eta arrays to .npy files
+    np.save('step0_eta.npy', eta)
+    np.save('step0_eta_element.npy', eta_element)
+    # Write a list of material names and eta values to a text file
+    with open('step0_eta.txt', 'w') as f:
+        for m, mat in enumerate(mats):
+            f.write('{0}, eta={1} \n'.format(mat[0].split(':')[1], eta[m, :, -1]))
+        f.write('------ \nTotal eta value per unique element: \n------ \n')
+        for e, element in enumerate(elements):
+            f.write('{0}, eta={1} \n'.format(element[0].split(':')[1], eta_element[e, :, -1]))
+
+    if clean:
+        print("Deleting intermediate files for Step 0")
+        shutil.rmtree('step0')
+
+
+def step1(cfg, cfg1):
+    """
+    This function writes the PARTISN input file for the adjoint photon transport.
 
     Parameters
     ----------
     cfg : dictionary
+        User input for 'general' from the config.yml file
+    cfg1 : dictionary
         User input for step 1 from the config.yml file
     """
     # Get user-input from config file
-    geom = cfg['geom_file']
-    cells = [cfg['src_cell']]
-    src_vol = [float(cfg['src_vol'])]
+    num_n_groups = cfg['n_groups']
+    num_p_groups = cfg['p_groups']
+    geom = cfg1['p_geom_file']
+    cells = [cfg1['src_cell']]
+    src_vol = [float(cfg1['src_vol'])]
 
     try:
-        origin_x, origin_y, origin_z = cfg['origin'].split(' ')
+        origin_x, origin_y, origin_z = cfg1['origin'].split(' ')
     except:
         print("Too few entries in origin location")
 
-    xmesh = cfg['xmesh']
-    xints = cfg['xints']
-    ymesh = cfg['ymesh']
-    yints = cfg['yints']
-    zmesh = cfg['zmesh']
-    zints = cfg['zints']
+    xmesh = cfg1['xmesh']
+    xints = cfg1['xints']
+    ymesh = cfg1['ymesh']
+    yints = cfg1['yints']
+    zmesh = cfg1['zmesh']
+    zints = cfg1['zints']
 
     # Create structured mesh
     sc = [np.linspace(float(origin_x), float(xmesh), float(xints) + 1),
@@ -155,28 +293,27 @@ def step1(cfg):
     m = Mesh(structured=True, structured_coords=sc)
     m.mesh.save("blank_mesh.h5m")
 
-    # Generate 42 photon energy bins [eV]
-    # First bin has been replaced with 1 for log interpolation
-    photon_bins = np.array([1e-6, 0.01, 0.02, 0.03, 0.045, 0.06, 0.07, 0.075, 0.1, 0.15,
-                            0.2, 0.3, 0.4, 0.45, 0.51, 0.512, 0.6, 0.7, 0.8, 1, 1.33, 1.34,
-                            1.5, 1.66, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6, 6.5, 7, 7.5, 8, 10,
-                            12, 14, 20, 30, 50])
+    # Get the photon energy bin structure [MeV] that matches the number of photon
+    # energy groups in the problem
+    convert_to_MeV = 1.0E-6
+    photon_bins = convert_to_MeV * _get_p_bins(num_p_groups)
+    # Add an additional bin to the beginning of the array with a value of 1 for log interpolation
+    photon_bins = np.hstack([1.0E-6, photon_bins])
     # ICRP 74 flux-to-dose conversion factors in pico-Sv/s per photon flux
-    de = np.array([0.01, 0.015, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.15, 0.2, 0.3,
-                   0.4, 0.5, 0.6, 0.8, 1, 2, 4, 6, 8, 10])
-    df = np.array([0.0485, 0.1254, 0.205, 0.2999, 0.3381, 0.3572, 0.378, 0.4066, 0.4399, 0.5172,
-                   0.7523, 1.0041, 1.5083, 1.9958, 2.4657, 2.9082, 3.7269, 4.4834, 7.4896,
-                   12.0153, 15.9873, 19.9191, 23.76])
+    de = np.array([0.01, 0.015, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1,
+                   0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.8, 1, 2, 4, 6, 8, 10])
+    df = np.array([0.0485, 0.1254, 0.205, 0.2999, 0.3381, 0.3572, 0.378, 0.4066,
+                   0.4399, 0.5172, 0.7523, 1.0041, 1.5083, 1.9958, 2.4657, 2.9082,
+                   3.7269, 4.4834, 7.4896, 12.0153, 15.9873, 19.9191, 23.76])
     # Convert to Sv/s per photon FLUX
-    pico = 1.0e-12
-    df = df * pico
+    convert_to_pico = 1.0e-12
+    df = df * convert_to_pico
     # Convert pointwise data to group data for log interpolation
-    photon_spectrum = pointwise_collapse(
-        photon_bins, de, df, logx=True, logy=True)
+    photon_spectrum = pointwise_collapse(photon_bins, de, df, logx=True, logy=True)
     # Anything below 0.01 MeV should be assigned the DF value of 0.01 MeV
     photon_spectrum[0] = df[0]
     # Total number of groups is 217 (42 photon + 175 neutron)
-    spectra = [np.append(photon_spectrum, np.zeros(175))]
+    spectra = [np.append(photon_spectrum, np.zeros(num_n_groups))]
     # The spectrum is normalized by PyNE, so we need to mutliply by the sum of
     # intensities in the spectrum.
     # Additionally, we divide by the volume of the source cell in order to get
@@ -189,7 +326,7 @@
     source, dg = isotropic_vol_source(geom, m, cells, spectra, intensities)
 
     # PARTISN input
-    ngroup = 217  # total number of energy groups
+    ngroup = num_n_groups + num_p_groups  # total number of energy groups
     cards = _cards(source)  # block 1, 3, 5 input values
     names_dict = _names_dict()  # dictionary of isotopes (PyNE nucids to bxslib names)
 
@@ -203,22 +340,23 @@
                         data_hdf5path="/materials",
                         nuc_hdf5path="/nucid", fine_per_coarse=1)
-
-
+
 def main():
-    """ This function manages the setup and steps 1-5 for the GT-CADIS workflow.
+    """
+    This function manages the setup and steps 1-5 for the GT-CADIS workflow.
     """
-
-    gtcadis_help = ('This script automates the GT-CADIS process of \n'
-                    'producing variance reduction parameters to optimize the\n'
-                    'neutron transport step of the Rigorous 2-Step (R2S) method.\n')
-    setup_help = ('Prints the file "config.yml" to be\n'
-                  'filled in by the user.\n')
-    step1_help = 'Creates the PARTISN input file for adjoint photon transport.'
+    gtcadis_help = ('This script automates the GT-CADIS process of producing \n'
+                    'variance reduction parameters to optimize the neutron \n'
+                    'transport step of the Rigorous 2-Step (R2S) method.\n')
+    setup_help = ('Prints the file "config.yml" to be filled in by the user.\n')
+    step0_help = ('Performs the optional SNILB criteria check.')
+    step1_help = ('Creates the PARTISN input file for adjoint photon transport.')
+
     parser = argparse.ArgumentParser()
     subparsers = parser.add_subparsers(help=gtcadis_help, dest='command')
     setup_parser = subparsers.add_parser('setup', help=setup_help)
+    step0_parser = subparsers.add_parser('step0', help=step0_help)
     step1_parser = subparsers.add_parser('step1', help=step1_help)
 
     args, other = parser.parse_known_args()
@@ -227,10 +365,11 @@ def main():
     else:
         with open(config_filename, 'r') as f:
             cfg = yaml.load(f)
-
-    if args.command == 'step1':
-        step1(cfg['step1'])
-
+
+    if args.command == 'step0':
+        step0(cfg['general'], cfg['step2'])
+    elif args.command == 'step1':
+        step1(cfg['general'], cfg['step1'])
 
 if __name__ == '__main__':
     main()
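
As a usage note, a rough sketch of what the command-line flow amounts to once config.yml has been filled in; the sub-command names and config keys come from main() and step0()/step1() above, while the config values themselves are problem-specific:

    import yaml

    # Equivalent of running "gtcadis.py step0" followed by "gtcadis.py step1"
    with open('config.yml', 'r') as f:
        cfg = yaml.load(f)

    step0(cfg['general'], cfg['step2'])   # optional SNILB check; writes step0_eta.txt, step0_eta.npy, step0_phtn_src
    step1(cfg['general'], cfg['step1'])   # writes the PARTISN input for the adjoint photon transport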