Skip to content

Commit

Permalink
Updates to the GAMS api
Browse files Browse the repository at this point in the history
  • Loading branch information
squoilin committed Apr 10, 2017
1 parent d63fd92 commit 7c5ffb3
Show file tree
Hide file tree
Showing 6 changed files with 63 additions and 38 deletions.
23 changes: 15 additions & 8 deletions DispaSET/misc/gdx_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,18 @@ def package_exists(package):


def import_local_lib(lib):
# Try to import the GAMS api and gdxcc to write gdx files
'''
Try to import the GAMS api and gdxcc to write gdx files
'''
# First define the path to the 'Externals' folder. This path must be defined relatively to the current script location
path_script = os.path.dirname(__file__)
path_ext = os.path.join(path_script,'../../Externals')

if lib == 'gams':
if sys.platform == 'linux2' and platform.architecture()[0] == '64bit':
sys.path.append('./Externals/gams_api/linux64/')
sys.path.append(os.path.join(path_ext,'gams_api/linux64/'))
elif sys.platform == 'win32' and platform.architecture()[0] == '64bit':
sys.path.append('./Externals/gams_api/win64/')
sys.path.append(os.path.join(path_ext,'gams_api/win64/'))
try:
import gams
return True
Expand All @@ -47,19 +53,20 @@ def import_local_lib(lib):
sys.exit(1)
elif lib == 'gdxcc':
if sys.platform == 'linux2' and platform.architecture()[0] == '32bit':
sys.path.append('./Externals/gdxcc/linux32/')
sys.path.append(os.path.join(path_ext,'gdxcc/linux32/'))
elif sys.platform == 'linux2' and platform.architecture()[0] == '64bit':
sys.path.append('./Externals/gams_api/linux64/')
sys.path.append(os.path.join(path_ext,'gams_api/linux64/'))
elif sys.platform == 'win32' and platform.architecture()[0] == '32bit':
sys.path.append('./Externals/gdxcc/win32/')
sys.path.append(os.path.join(path_ext,'gdxcc/win32/'))
elif sys.platform == 'win32' and platform.architecture()[0] == '64bit':
sys.path.append('./Externals/gams_api/win64/')
sys.path.append(os.path.join(path_ext,'gams_api/win64/'))
elif sys.platform == 'darwin':
sys.path.append('./Externals/gdxcc/osx64/')
sys.path.append(os.path.join(path_ext,'gdxcc/osx64/'))
try:
import gdxcc
return True
except ImportError:
print [x for x in sys.path]
logging.critical("gdxcc module could not be found. GDX cannot be produced or read")
sys.exit(1)
else:
Expand Down
29 changes: 16 additions & 13 deletions DispaSET/preprocessing/data_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,30 +81,33 @@ def define_parameter(sets_in, sets, value=0):
return {'sets': sets_in, 'val': values}


def invert_dic_df(dic):
def invert_dic_df(dic,tablename=''):
"""
Function that takes as input a dictionary of dataframes, and inverts the key of
the dictionary with the columns headers of the dataframes
:param dic: dictionary of dataframes, with the same columns headers and the same index
:param tablename: string with the name of the table being processed (for the error msg)
:returns: dictionary of dataframes, with swapped headers
"""
cols = [aa for aa in dic[dic.keys()[0]].columns]
index = dic[dic.keys()[0]].index
cols_out = dic.keys()
# keys are defined as the keys of the original dictionary, cols are the columns of the original dataframe
# items are the keys of the output dictionary, i.e. the columns of the original dataframe
dic_out = {}
for col in cols:
dic_out[col] = pd.DataFrame(index=index, columns=cols_out)
# First, check that all indexes have the same length:
index = dic[dic.keys()[0]].index
for key in dic:
if [aa for aa in dic[key].columns] != cols:
logging.error('The columns of the dataframes within the dictionary are not all equal')
sys.exit(1)
if len(dic[key].index) != len(index):
logging.error('The indexes of the dataframes within the dictionary are not all equal')
logging.error('The indexes of the data tables "' + tablename + '" are not equal in all the files')
sys.exit(1)
for col in cols:
dic_out[col][key] = dic[key][col]
return dic_out #TODO: this def could be replaced by pd.Panel.fromDict(dic, orient='minor')
# Then put the data in a panda Panel with minor orientation:
panel = pd.Panel.fromDict(dic, orient='minor')
# Display a warning if some items are missing in the original data:
for item in panel.items:
for key in dic.keys():
if item not in dic[key].columns:
logging.warn('The column "' + item + '" is not present in "' + key + '" for the "' + tablename + '" data. Zero will be assumed')
dic_out[item] = panel[item].fillna(0)
return dic_out


def write_to_excel(xls_out, list_vars):
Expand Down
17 changes: 9 additions & 8 deletions DispaSET/preprocessing/preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def build_simulation(config):
path = config['RenewablesAF'].replace('##', str(c))
tmp = load_csv(path, index_col=0, parse_dates=True)
values[c] = tmp.loc[idx_utc_noloc, :]
Renewables = invert_dic_df(values)
Renewables = invert_dic_df(values,tablename='AvailabilityFactors')

# Outage factors:
paths = {}
Expand Down Expand Up @@ -135,12 +135,13 @@ def build_simulation(config):

# Power plants:
plants = pd.DataFrame()
for c in config['countries']:
path = config['PowerPlantData'].replace('##', str(c))
tmp = load_csv(path)
# if tmp.index.dtype == 'int64' or tmp.index.dtype == 'float' or tmp.index.dtype == 'int32':
# sys.exit('Error in country ' + c + '. Power plants ids must be strings (not numbers) and must be unique even between countries')
plants = plants.append(tmp, ignore_index=True)
if os.path.isfile(config['PowerPlantData']):
plants = load_csv(config['PowerPlantData'])
elif '##' in config['PowerPlantData']:
for c in config['countries']:
path = config['PowerPlantData'].replace('##', str(c))
tmp = load_csv(path)
plants = plants.append(tmp, ignore_index=True)
plants = plants[plants['Technology'] != 'Other']
plants = plants[pd.notnull(plants['PowerCapacity'])]
plants.index = range(len(plants))
Expand Down Expand Up @@ -328,7 +329,7 @@ def build_simulation(config):
logging.warn('No outage data found for plant "' + str(
oldname) + '". Using the non country-specific outage profile provided for technology "' + str(
Plants_merged['Technology'][newname]) + '".')
Outages[oldname] = Outages[('all', Plants_merged['Technology'][newname])]
Outages[oldname] = TechOutages[('all', Plants_merged['Technology'][newname])]

# Merging the time series relative to the clustered power plants:
ReservoirScaledInflows_merged = merge_series(plants, ReservoirScaledInflows, mapping, method='WeightedAverage', tablename='ScaledInflows')
Expand Down
10 changes: 9 additions & 1 deletion DispaSET/preprocessing/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def incidence_matrix(sets, set_used, parameters, param_used):
first_country = sets[set_used][i][0:2]
second_country = sets[set_used][i][6:9]
else:
logging.error('The format of the interconnection is not admitted.')
logging.error('The format of the interconnection is not valid.')
sys.exit(1)

for j in range(len(sets['n'])):
Expand Down Expand Up @@ -90,8 +90,14 @@ def interconnections(Simulation_list, NTC_inter, Historical_flows):
for interconnection in simulation_connections:
if interconnection in NTC_inter.columns:
df_countries_simulated[interconnection] = NTC_inter[interconnection]
logging.info('Detected interconnection ' + interconnection + '. The historical NTCs will be imposed as maximum flow value')
interconnections1 = df_countries_simulated.columns

# Display a warning if a country is isolated:
for c in Simulation_list:
if not any([c in conn for conn in interconnections1]):
            logging.warn('Zone ' + c + ' does not appear to be connected to any other zone in the NTC table. It should be simulated in isolation')

df_RoW_temp = pd.DataFrame(index=index)
connNames = []
for interconnection in all_connections:
Expand All @@ -112,8 +118,10 @@ def interconnections(Simulation_list, NTC_inter, Historical_flows):
for name in connNames:
if nameToCompare[0:2] in name[0:2]:
exports.append(connNames.index(name))
logging.info('Detected interconnection ' + name + ', happening between a simulated zone and the rest of the world. The historical flows will be imposed to the model')
elif nameToCompare[0:2] in name[6:8]:
imports.append(connNames.index(name))
logging.info('Detected interconnection ' + name + ', happening between the rest of the world and a simulated zone. The historical flows will be imposed to the model')

flows_out = pd.concat(df_RoW_temp[connNames[exports[i]]] for i in range(len(exports)))
flows_out = flows_out.groupby(flows_out.index).sum()
Expand Down
17 changes: 10 additions & 7 deletions DispaSET/solve.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def is_sim_folder_ok(sim_folder):
return False

if not os.path.exists(os.path.join(sim_folder, 'Inputs.gdx')):
logging.error('There is no Inputs.gdx file within the specified DispaSET simulation environment folder (' + sim_folder + ')')
        logging.error('There is no Inputs.gdx file within the specified DispaSET simulation environment folder (' + sim_folder + '). Check that the GDX output is activated in the option file and that no error occurred during the pre-processing')
return False

if not os.path.exists(os.path.join(sim_folder, 'UCM_h.gms')):
Expand All @@ -35,7 +35,7 @@ def is_sim_folder_ok(sim_folder):
return True


def solve_GAMS(sim_folder, gams_folder=None):
def solve_GAMS(sim_folder, gams_folder=None, output_lst=False):
if not package_exists('gams'):
logging.warning('Could not import gams. Trying to automatically locate gdxcc folder')
if not import_local_lib('gams'):
Expand All @@ -55,18 +55,21 @@ def solve_GAMS(sim_folder, gams_folder=None):
shutil.copy(os.path.join(sim_folder, 'UCM_h.gms'), ws.working_directory)
shutil.copy(os.path.join(sim_folder, 'Inputs.gdx'), ws.working_directory)
t1 = ws.add_job_from_file('UCM_h.gms')
#Do not create .lst file
opt = ws.add_options()
if sys.platform == 'win32':
opt.output = 'nul'
else:
opt.output = '/dev/null'
#Do not create .lst file
if not output_lst:
if sys.platform == 'win32':
opt.output = 'nul'
else:
opt.output = '/dev/null'
time0 = time.time()
t1.run(opt)
logging.info('Completed simulation in {0:.2f} seconds'.format(time.time() - time0))

# copy the result file to the simulation environment folder:
shutil.copy(os.path.join(ws.working_directory, 'Results.gdx'), sim_folder)
if output_lst:
shutil.copy(os.path.join(ws.working_directory, 'UCM_h.lst'), sim_folder)
return t1
else:
return False
Expand Down
5 changes: 4 additions & 1 deletion dispacli.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,10 @@ def cli(ctx, config, engine):
@click.pass_context
def build(ctx):
"""Build simulation files"""

conf = ctx.obj['conf']
engine = ctx.obj['engine']
if engine == 'gams' and conf['WriteGDX']:
        logging.warn('The config specifies that a gdx file should be written, although PYOMO is selected as engine. This requires a properly installed version of GAMS. Deactivate the option if that is not the case')
SimData = ds.build_simulation(ctx.obj['conf'] )


Expand Down

0 comments on commit 7c5ffb3

Please sign in to comment.