From cf787d7bd6d64eb8e0da59afb23ecd25458f72df Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Oct 2019 18:24:44 +0200 Subject: [PATCH 001/111] integrate nomopyomo --- pypsa/__init__.py | 2 +- pypsa/components.py | 92 +++++- pypsa/opf_lm.py | 671 ++++++++++++++++++++++++++++++++++++++++ pypsa/opt_lm.py | 585 +++++++++++++++++++++++++++++++++++ pypsa/variables.csv | 18 ++ test/test_ac_dc_lopf.py | 37 ++- 6 files changed, 1386 insertions(+), 19 deletions(-) create mode 100644 pypsa/opf_lm.py create mode 100644 pypsa/opt_lm.py create mode 100644 pypsa/variables.csv diff --git a/pypsa/__init__.py b/pypsa/__init__.py index fcd5ae012..2d15fbb95 100644 --- a/pypsa/__init__.py +++ b/pypsa/__init__.py @@ -26,7 +26,7 @@ from __future__ import absolute_import from . import components, descriptors -from . import pf, opf, plot, networkclustering, io, contingency, geo +from . import pf, opf, plot, networkclustering, io, contingency, geo, opf_lm, opt_lm from .components import Network, SubNetwork diff --git a/pypsa/components.py b/pypsa/components.py index 6443f52e2..d14a558ec 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -67,14 +67,12 @@ from .opf import network_lopf, network_opf +from .opf_lm import network_lopf as network_lopf_lm + from .plot import plot, iplot from .graph import graph, incidence_matrix, adjacency_matrix -import inspect - -import sys - import logging logger = logging.getLogger(__name__) @@ -198,7 +196,7 @@ class Network(Basic): pf = network_pf - lopf = network_lopf +# lopf = network_lopf opf = network_opf @@ -407,6 +405,90 @@ def set_snapshots(self,snapshots): #NB: No need to rebind pnl to self, since haven't changed it + def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, + solver_logfile=None, solver_options={}, keep_files=False, + formulation="kirchhoff", extra_postprocessing=None, pyomo=True, + **kwargs): + """ + Linear optimal power flow for a group of snapshots. + + Parameters + ---------- + snapshots : list or index slice + A list of snapshots to optimise, must be a subset of + network.snapshots, defaults to network.snapshots + solver_name : string + Must be a solver name that pyomo recognises and that is + installed, e.g. "glpk", "gurobi" + pyomo : bool, default True + Whether to use pyomo for building and solving the model, setting + this to False saves a lot of memory and time. + solver_logfile : None|string + If not None, sets the logfile option of the solver. + solver_options : dictionary + A dictionary with additional options that get passed to the solver. + (e.g. {'threads':2} tells gurobi to use only 2 cpus) + keep_files : bool, default False + Keep the files that pyomo constructs from OPF problem + construction, e.g. .lp file - useful for debugging + formulation : string + Formulation of the linear power flow equations to use; must be + one of ["angles","cycles","kirchhoff","ptdf"] + extra_functionality : callable function + This function must take two arguments + `extra_functionality(network,snapshots)` and is called after + the model building is complete, but before it is sent to the + solver. It allows the user to + add/change constraints and add/change the objective function. + extra_postprocessing : callable function + This function must take three arguments + `extra_postprocessing(network,snapshots,duals)` and is called after + the model has solved and the results are extracted. It allows the user to + extract further information about the solution, such as additional shadow prices. 
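+
+        For example (a minimal sketch, assuming a network ``n`` with input
+        data attached and GLPK installed), the pyomo-free path is selected
+        via the new keyword:
+
+        >>> n.lopf(n.snapshots, solver_name='glpk', pyomo=False)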
+ + + These arguments can be used if pyomo is set to False: + ----------------------------------------------------- + warmstart : bool or string, default False + Use this to warmstart the optimization. Pass a string which gives + the path to the basis file. If set to True, a path to + a basis file must be given in network.basis_fn. + store_basis : bool, default True + Whether to store the basis of the optimization results. If True, + the path to the basis file is saved in network.basis_fn. Note that + a basis can only be stored if simplex, dual-simplex, or barrier + *with* crossover is used for solving. + + + These arguments can be used if pyomo is set to True: + ---------------------------------------------------- + ptdf_tolerance : float + Value below which PTDF entries are ignored + free_memory : set, default {'pyomo'} + Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series + data away while the solver runs (as a pickle to disk) and/or free + `pyomo` data after the solution has been extracted. + solver_io : string, default None + Solver Input-Output option, e.g. "python" to use "gurobipy" for + solver_name="gurobi" + skip_pre : bool, default False + Skip the preliminary steps of computing topology, calculating + dependent values and finding bus controls. + + Returns + ------- + None + """ + args = {'snapshots': snapshots, 'keep_files': keep_files, + 'solver_options': solver_options, 'formulation': formulation, + 'extra_functionality': extra_functionality, + 'extra_postprocessing': extra_postprocessing, + 'solver_name': solver_name, 'solver_logfile': solver_logfile} + if pyomo: + return network_lopf(self, **args, **kwargs) + else: + return network_lopf_lm(self, **args, **kwargs) + def add(self, class_name, name, **kwargs): diff --git a/pypsa/opf_lm.py b/pypsa/opf_lm.py new file mode 100644 index 000000000..95cccf5d7 --- /dev/null +++ b/pypsa/opf_lm.py @@ -0,0 +1,671 @@ +## Copyright 2019 Tom Brown (KIT), Fabian Hofmann (FIAS) + +## This program is free software; you can redistribute it and/or +## modify it under the terms of the GNU General Public License as +## published by the Free Software Foundation; either version 3 of the +## License, or (at your option) any later version. + +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. + +## You should have received a copy of the GNU General Public License +## along with this program. If not, see . + +"""nomopyomo: build optimisation problems from PyPSA networks without +Pyomo. nomopyomo = no more Pyomo.""" + +from .opt_lm import (get_bounds_pu, get_extendable_i, + get_non_extendable_i, write_bound, write_constraint, + set_conref, set_varref, get_con, get_var, lookup, + nominals, reset_counter, expand_series, join_exprs, linexpr) + +from .pf import (find_cycles as find_cycles, _as_snapshots, + get_switchable_as_dense as get_as_dense) + +from . 
import opt_lm + +import pandas as pd +import numpy as np + +import gc, string, random, time, os, re + +import logging +logger = logging.getLogger(__name__) + +def define_nominal_for_extendable_variables(n, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + lower = n.df(c)[attr+'_min'][ext_i] + upper = n.df(c)[attr+'_max'][ext_i] + variables = write_bound(n, lower, upper) + set_varref(n, variables, c, attr, pnl=False) + + +def define_dispatch_for_extendable_variables(n, sns, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + variables = write_bound(n, -np.inf, np.inf, axes=[sns, ext_i]) + set_varref(n, variables, c, attr, pnl=True, spec='extendables') + + +def define_dispatch_for_non_extendable_variables(n, sns, c, attr): + fix_i = get_non_extendable_i(n, c) + if fix_i.empty: return + nominal_fix = n.df(c)[nominals.at[c]][fix_i] + min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) + lower = min_pu.mul(nominal_fix) + upper = max_pu.mul(nominal_fix) + variables = write_bound(n, lower, upper) + set_varref(n, variables, c, attr, pnl=True, spec='nonextendables') + + +def define_dispatch_for_extendable_constraints(n, sns, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr) + operational_ext_v = get_var(n, c, attr)[ext_i] + nominal_v = get_var(n, c, nominals.at[c])[ext_i] + rhs = 0 + + lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v), + return_axes=True) + constraints = write_constraint(n, lhs, '>=', rhs, axes) + set_conref(n, constraints, c, 'mu_upper', pnl=True, spec=attr) + + lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v), + return_axes=True) + constraints = write_constraint(n, lhs, '<=', rhs, axes) + set_conref(n, constraints, c, 'mu_lower', pnl=True, spec=attr) + + +def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True): + if pnl: + if attr + '_set' not in n.pnl(c): return + fix = n.pnl(c)[attr + '_set'].unstack().dropna() + if fix.empty: return + lhs = linexpr((1, get_var(n, c, attr).unstack()[fix.index])) + constraints = write_constraint(n, lhs, '=', fix).unstack().T + else: + if attr + '_set' not in n.df(c): return + fix = n.df(c)[attr + '_set'].dropna() + if fix.empty: return + lhs = linexpr((1, get_var(n, c, attr)[fix.index])) + constraints = write_constraint(n, lhs, '=', fix) + set_conref(n, constraints, c, f'mu_{attr}_set', pnl) + + +def define_ramp_limit_constraints(n, sns): + c = 'Generator' + rup_i = n.df(c).query('ramp_limit_up == ramp_limit_up').index + rdown_i = n.df(c).query('ramp_limit_down == ramp_limit_down').index + if rup_i.empty & rdown_i.empty: + return + p = get_var(n, c, 'p').loc[sns[1:]] + p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]] + + #fix up + gens_i = rup_i & get_non_extendable_i(n, c) + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + return_axes=True)) + rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') + constraints = write_constraint(n, lhs, '<=', rhs) + set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='nonextendables') + + #ext up + gens_i = rup_i & get_extendable_i(n, c) + limit_pu = n.df(c)['ramp_limit_up'][gens_i] + p_nom = get_var(n, c, 'p_nom')[gens_i] + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (-limit_pu, p_nom), return_axes=True)) + constraints = write_constraint(n, lhs, '<=', 0) + set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='extendables') + + #fix down + gens_i = rdown_i & get_non_extendable_i(n, c) + lhs = 
pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + return_axes=True)) + rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') + constraints = write_constraint(n, lhs, '>=', rhs) + set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='nonextendables') + + #ext down + gens_i = rdown_i & get_extendable_i(n, c) + limit_pu = n.df(c)['ramp_limit_down'][gens_i] + p_nom = get_var(n, c, 'p_nom')[gens_i] + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_pu, p_nom), return_axes=True)) + constraints = write_constraint(n, lhs, '>=', 0) + set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='extendables') + + +def define_nodal_balance_constraints(n, sns): + + def bus_injection(c, attr, groupcol='bus', sign=1): + #additional sign only necessary for branches in reverse direction + if 'sign' in n.df(c): + sign = sign * n.df(c).sign + vals = linexpr((sign, get_var(n, c, attr)), return_axes=True) + return pd.DataFrame(*vals).rename(columns=n.df(c)[groupcol]) + + # one might reduce this a bit by using n.branches and lookup + args = [['Generator', 'p'], ['Store', 'p'], ['StorageUnit', 'p_dispatch'], + ['StorageUnit', 'p_store', 'bus', -1], ['Line', 's', 'bus0', -1], + ['Line', 's', 'bus1', 1], ['Transformer', 's', 'bus0', -1], + ['Transformer', 's', 'bus1', 1], ['Link', 'p', 'bus0', -1], + ['Link', 'p', 'bus1', n.links.efficiency]] + args = [arg for arg in args if not n.df(arg[0]).empty] + + lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) + .groupby(axis=1, level=0) + .agg(lambda x: ''.join(x.values)) + .reindex(columns=n.buses.index)) + sense = '=' + rhs = ((- n.loads_t.p_set * n.loads.sign) + .groupby(n.loads.bus, axis=1).sum() + .reindex(columns=n.buses.index, fill_value=0)) + constraints = write_constraint(n, lhs, sense, rhs) + set_conref(n, constraints, 'Bus', 'nodal_balance') + + +def define_kirchhoff_constraints(n): + weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff) + + def cycle_flow(ds): + ds = ds[lambda ds: ds!=0.].dropna() + vals = linexpr((ds, get_var(n, 'Line', 's')[ds.index])) + '\n' + return vals.sum(1) + + constraints = [] + for sub in n.sub_networks.obj: + find_cycles(sub) + C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) + if C.empty: + continue + C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) + con = write_constraint(n, C_weighted.apply(cycle_flow), '=', 0) + constraints.append(con) + constraints = pd.concat(constraints, axis=1, ignore_index=True) + set_conref(n, constraints, 'Line', 'kirchhoff_voltage') + + +def define_storage_unit_constraints(n, sns): + sus_i = n.storage_units.index + if sus_i.empty: return + c = 'StorageUnit' + #spillage + upper = get_as_dense(n, c, 'inflow').loc[:, lambda df: df.max() > 0] + spill = write_bound(n, 0, upper) + set_varref(n, spill, 'StorageUnit', 'spill') + + #soc constraint previous_soc + p_store - p_dispatch + inflow - spill == soc + eh = expand_series(n.snapshot_weightings, sus_i) #elapsed hours + + eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + eff_dispatch = expand_series(n.df(c).efficiency_dispatch, sns).T + eff_store = expand_series(n.df(c).efficiency_store, sns).T + + soc = get_var(n, c, 'state_of_charge') + cyclic_i = n.df(c).query('cyclic_state_of_charge').index + noncyclic_i = n.df(c).query('~cyclic_state_of_charge').index + + prev_soc_cyclic = soc.shift().fillna(soc.loc[sns[-1]]) + + coeff_var = [(-1, soc), + (-1/eff_dispatch * eh, get_var(n, c, 'p_dispatch')), + (eff_store * eh, get_var(n, c, 
'p_store'))] + lhs, *axes = linexpr(*coeff_var, return_axes=True) + + def masked_term(coeff, var, cols): + return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ + .reindex(index=axes[0], columns=axes[1], fill_value='').values + + lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns) + lhs += masked_term(eff_stand, prev_soc_cyclic, cyclic_i) + lhs += masked_term(eff_stand.loc[sns[1:]], soc.shift().loc[sns[1:]], noncyclic_i) + + rhs = -get_as_dense(n, c, 'inflow').mul(eh) + rhs.loc[sns[0], noncyclic_i] -= n.df(c).state_of_charge_initial[noncyclic_i] + + constraints = write_constraint(n, lhs, '==', rhs) + set_conref(n, constraints, c, 'soc') + + +def define_store_constraints(n, sns): + stores_i = n.stores.index + if stores_i.empty: return + c = 'Store' + variables = write_bound(n, -np.inf, np.inf, axes=[sns, stores_i]) + set_varref(n, variables, c, 'p') + + #previous_e - p == e + eh = expand_series(n.snapshot_weightings, stores_i) #elapsed hours + eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + + e = get_var(n, c, 'e') + cyclic_i = n.df(c).query('e_cyclic').index + noncyclic_i = n.df(c).query('~e_cyclic').index + + previous_e_cyclic = e.shift().fillna(e.loc[sns[-1]]) + + coeff_var = [(-eh, get_var(n, c, 'p')), (-1, e)] + + lhs, *axes = linexpr(*coeff_var, return_axes=True) + + def masked_term(coeff, var, cols): + return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ + .reindex(index=axes[0], columns=axes[1], fill_value='').values + + lhs += masked_term(eff_stand, previous_e_cyclic, cyclic_i) + lhs += masked_term(eff_stand.loc[sns[1:]], e.shift().loc[sns[1:]], noncyclic_i) + + rhs = pd.DataFrame(0, sns, stores_i) + rhs.loc[sns[0], noncyclic_i] -= n.df(c)['e_initial'][noncyclic_i] + + constraints = write_constraint(n, lhs, '==', rhs) + set_conref(n, constraints, c, 'soc') + + +def define_global_constraints(n, sns): + glcs = n.global_constraints.query('type == "primary_energy"') + for name, glc in glcs.iterrows(): + carattr = glc.carrier_attribute + emissions = n.carriers.query(f'{carattr} != 0')[carattr] + if emissions.empty: continue + gens = n.generators.query('carrier in @emissions.index') + em_pu = gens.carrier.map(emissions)/gens.efficiency + em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T + vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) + lhs = join_exprs(vals) + rhs = glc.constant + + #storage units + sus = n.storage_units.query('carrier in @emissions.index and ' + 'not cyclic_state_of_charge') + sus_i = sus.index + if not sus.empty: + vals = linexpr((-sus.carrier.map(emissions), + get_var(n, 'StorageUnit', 'state_of_charge').loc[sns[-1], sus_i])) + lhs = lhs + '\n' + join_exprs(vals) + rhs -= sus.carrier.map(emissions) @ sus.state_of_charge_initial + + #stores + n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) + stores = n.stores.query('carrier in @emissions.index and not e_cyclic') + if not stores.empty: + vals = linexpr((-stores.carrier.map(n.emissions), + get_var(n, 'Store', 'e').loc[sns[-1], stores.index])) + lhs = lhs + '\n' + join_exprs(vals) + rhs -= stores.carrier.map(emissions) @ stores.state_of_charge_initial + + + con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + + #expansion limits + glcs = n.global_constraints.query('type == ' + '"transmission_volume_expansion_limit"') + substr = lambda s: re.sub('[\[\]\(\)]', '', s) + for name, glc in glcs.iterrows(): + carattr = 
[substr(c.strip()) for c in glc.carrier_attribute.split(',')] + lines_ext_i = n.lines.query(f'carrier in @carattr ' + 'and s_nom_extendable').index + links_ext_i = n.links.query(f'carrier in @carattr ' + 'and p_nom_extendable').index + linevars = linexpr((n.lines.length[lines_ext_i], + get_var(n, 'Line', 's_nom')[lines_ext_i])) + linkvars = linexpr((n.links.length[links_ext_i], + get_var(n, 'Link', 'p_nom')[links_ext_i])) + lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) + sense = glc.sense + rhs = glc.constant + con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + + #expansion cost limits + glcs = n.global_constraints.query('type == ' + '"transmission_expansion_cost_limit"') + for name, glc in glcs.iterrows(): + carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')] + lines_ext_i = n.lines.query(f'carrier in @carattr ' + 'and s_nom_extendable').index + links_ext_i = n.links.query(f'carrier in @carattr ' + 'and p_nom_extendable').index + linevars = linexpr((n.lines.capital_cost[lines_ext_i], + get_var(n, 'Line', 's_nom')[lines_ext_i])) + linkvars = linexpr((n.links.capital_cost[links_ext_i], + get_var(n, 'Link', 'p_nom')[links_ext_i])) + lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) + sense = glc.sense + rhs = glc.constant + con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + + +def define_objective(n): + for c, attr in lookup.query('marginal_cost').index: + cost = (get_as_dense(n, c, 'marginal_cost') + .loc[:, lambda ds: (ds != 0).all()] + .mul(n.snapshot_weightings, axis=0)) + if cost.empty: continue + terms = linexpr((cost, get_var(n, c, attr)[cost.columns])) + for t in terms.flatten(): + n.objective_f.write(t) + #investment + for c, attr in nominals.items(): + cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] + if cost.empty: continue + terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n' + for t in terms.flatten(): + n.objective_f.write(t) + + + +def prepare_lopf(n, snapshots=None, keep_files=False, + extra_functionality=None): + reset_counter() + + #used in kirchhoff and globals + n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) + + cols = ['component', 'name', 'pnl', 'specification'] + n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) + n.constraints = pd.DataFrame(columns=cols).set_index(cols[:2]) + + snapshots = n.snapshots if snapshots is None else snapshots + start = time.time() + def time_info(message): + logger.info(f'{message} {round(time.time()-start, 2)}s') + + n.identifier = ''.join(random.choice(string.ascii_lowercase) + for i in range(8)) + objective_fn = f"/tmp/objective-{n.identifier}.txt" + constraints_fn = f"/tmp/constraints-{n.identifier}.txt" + bounds_fn = f"/tmp/bounds-{n.identifier}.txt" + n.problem_fn = f"/tmp/test-{n.identifier}.lp" + + n.objective_f = open(objective_fn, mode='w') + n.constraints_f = open(constraints_fn, mode='w') + n.bounds_f = open(bounds_fn, mode='w') + + n.objective_f.write('\* LOPF *\n\nmin\nobj:\n') + n.constraints_f.write("\n\ns.t.\n\n") + n.bounds_f.write("\nbounds\n") + + + for c, attr in lookup.query('nominal and not handle_separately').index: + define_nominal_for_extendable_variables(n, c, attr) + define_fixed_variariable_constraints(n, snapshots, c, attr, pnl=False) + for c, attr in lookup.query('not nominal and not handle_separately').index: + define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) + 
define_dispatch_for_extendable_variables(n, snapshots, c, attr) + define_dispatch_for_extendable_constraints(n, snapshots, c, attr) + define_fixed_variariable_constraints(n, snapshots, c, attr) + + define_ramp_limit_constraints(n, snapshots) + define_storage_unit_constraints(n, snapshots) + define_store_constraints(n, snapshots) + define_kirchhoff_constraints(n) + define_nodal_balance_constraints(n, snapshots) + define_global_constraints(n, snapshots) + define_objective(n) + + if extra_functionality is not None: + extra_functionality(n, snapshots) + + n.objective_f.close() + n.constraints_f.close() + n.bounds_f.write("end\n") + n.bounds_f.close() + + del n.objective_f + del n.constraints_f + del n.bounds_f + + os.system(f"cat {objective_fn} {constraints_fn} {bounds_fn} " + f"> {n.problem_fn}") + + time_info('Total preparation time:') + + if not keep_files: + for fn in [objective_fn, constraints_fn, bounds_fn]: + os.system("rm "+ fn) + + +def assign_solution(n, sns, variables_sol, constraints_dual, + extra_postprocessing, keep_references=False): + pop = not keep_references + #solutions + def map_solution(c, attr, pnl): + if pnl: + variables = get_var(n, c, attr, pop=pop) + if variables.empty: return + values = variables.stack().map(variables_sol).unstack() + if c in n.passive_branch_components: + n.pnl(c)['p0'] = values + n.pnl(c)['p1'] = - values + elif c == 'Link': + n.pnl(c)['p0'] = values + n.pnl(c)['p1'] = - values * n.df(c).efficiency + else: + n.pnl(c)[attr] = values + elif not get_extendable_i(n, c).empty: + n.df(c)[attr+'_opt'] = get_var(n, c, attr, pop=pop)\ + .map(variables_sol).fillna(n.df(c)[attr]) + else: + n.df(c)[attr+'_opt'] = n.df(c)[attr] + + for (c, attr), pnl in n.variables.pnl.items(): + map_solution(c, attr, pnl) + + if not n.df('StorageUnit').empty: + c = 'StorageUnit' + n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store'] + + #duals + def map_dual(c, attr, pnl): + if pnl: + n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() + .map(-constraints_dual).unstack()) + else: + n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(-constraints_dual) + + for (c, attr), pnl in n.constraints.pnl.items(): + map_dual(c, attr, pnl) + + #load + n.loads_t.p = n.loads_t.p_set + + #injection, why does it include injection in hvdc 'network' + ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), + ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), + ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')] + sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1 #sign for 'Link' + n.buses_t.p = pd.concat( + [n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group]) + for c, attr, group in ca], axis=1).groupby(level=0, axis=1).sum() + + def v_ang_for_(sub): + buses_i = sub.buses_o + if len(buses_i) == 1: return + sub.calculate_B_H(skip_pre=True) + if len(sub.buses_i()) == 1: return + Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i) + Z -= Z[sub.slack_bus] + return n.buses_t.p[buses_i] @ Z + n.buses_t.v_ang = (pd.concat( + [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) + .reindex(columns=n.buses.index, fill_value=0)) + + + + +def network_lopf(n, snapshots=None, solver_name="cbc", + solver_logfile=None, extra_functionality=None, + extra_postprocessing=None, formulation="kirchhoff", + keep_references=False, keep_files=False, solver_options={}, + warmstart=False, store_basis=True): + """ + Linear optimal power flow for a group of snapshots. 
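+
+    This is the pyomo-free implementation: the problem is written directly
+    to an LP file and handed to the solver. A minimal sketch, assuming a
+    network ``n`` and an installed cbc binary (status and termination
+    condition are returned):
+
+    >>> from pypsa.opf_lm import network_lopf
+    >>> status, condition = network_lopf(n, n.snapshots, solver_name='cbc')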
+ + Parameters + ---------- + snapshots : list or index slice + A list of snapshots to optimise, must be a subset of + network.snapshots, defaults to network.snapshots + solver_name : string + Must be a solver name that pyomo recognises and that is + installed, e.g. "glpk", "gurobi" + skip_pre : bool, default False + Skip the preliminary steps of computing topology, calculating + dependent values and finding bus controls. + extra_functionality : callable function + This function must take two arguments + `extra_functionality(network,snapshots)` and is called after + the model building is complete, but before it is sent to the + solver. It allows the user to + add/change constraints and add/change the objective function. + solver_logfile : None|string + If not None, sets the logfile option of the solver. + solver_options : dictionary + A dictionary with additional options that get passed to the solver. + (e.g. {'threads':2} tells gurobi to use only 2 cpus) + keep_files : bool, default False + Keep the files that pyomo constructs from OPF problem + construction, e.g. .lp file - useful for debugging + formulation : string + Formulation of the linear power flow equations to use; only "kirchhoff" + is currently supported + extra_postprocessing : callable function + This function must take three arguments + `extra_postprocessing(network,snapshots,duals)` and is called after + the model has solved and the results are extracted. It allows the user to + extract further information about the solution, such as additional + shadow prices. + + Returns + ------- + None + """ + supported_solvers = ["cbc", "gurobi", 'glpk', 'scs'] + if solver_name not in supported_solvers: + raise NotImplementedError(f"Solver {solver_name} not in " + f"supported solvers: {supported_solvers}") + + if formulation != "kirchhoff": + raise NotImplementedError("Only the kirchhoff formulation is supported") + + #disable logging because multiple slack bus calculations, keep output clean + logging.disable() + snapshots = _as_snapshots(n, snapshots) + n.calculate_dependent_values() + n.determine_network_topology() + for sub in n.sub_networks.obj: + sub.find_bus_controls() + logging.disable(1) + + if solver_logfile is None: + solver_logfile = "test.log" + + logger.info("Prepare linear problem") + prepare_lopf(n, snapshots, keep_files, extra_functionality) + gc.collect() + solution_fn = "/tmp/test-{}.sol".format(n.identifier) + + if warmstart == True: + warmstart = n.basis_fn + logger.info("Solve linear problem using warmstart") + else: + logger.info("Solve linear problem") + + solve = getattr(opt_lm, f'run_and_read_{solver_name}') + res = solve(n, n.problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart, store_basis) + status, termination_condition, variables_sol, constraints_dual, obj = res + del n.problem_fn + + if termination_condition != "optimal": + return status,termination_condition + + #adjust objective value + for c, attr in nominals.items(): + obj -= n.df(c)[attr] @ n.df(c).capital_cost + n.objective = obj + gc.collect() + assign_solution(n, snapshots, variables_sol, constraints_dual, + extra_postprocessing, keep_references=keep_references) + gc.collect() + + return status,termination_condition + + +def ilopf(n, snapshots=None, msq_threshold=0.05, min_iterations=1, + max_iterations=100, **kwargs): + ''' + Iterative linear optimization updating the line parameters for passive + AC and DC lines. This is helpful when line expansion is enabled. 
After each successful solve, line impedances and line resistances are
+    recalculated based on the optimization result. If warmstart is possible,
+    the result from the previous iteration is used to speed up the solving
+    process.
+
+    Parameters
+    ----------
+    snapshots : list or index slice
+        A list of snapshots to optimise, must be a subset of
+        network.snapshots, defaults to network.snapshots
+    msq_threshold: float, default 0.05
+        Maximal mean square difference between the optimized line capacities
+        of the current and the previous iteration. As soon as this threshold
+        is undercut and the number of iterations is bigger than
+        'min_iterations', the iterative optimization stops.
+    min_iterations : integer, default 1
+        Minimal number of iterations to run regardless of whether the
+        msq_threshold is already undercut.
+    max_iterations : integer, default 100
+        Maximal number of iterations to run regardless of whether the
+        msq_threshold is already undercut.
+    **kwargs
+        Keyword arguments of the lopf function which runs at each iteration.
+
+    '''
+
+    ext_i = get_extendable_i(n, 'Line')
+    typed_i = n.lines.query('type != ""').index
+    ext_untyped_i = ext_i.difference(typed_i)
+    ext_typed_i = ext_i & typed_i
+    base_s_nom = (np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
+                  n.lines.bus0.map(n.buses.v_nom))
+    n.lines.loc[ext_typed_i, 'num_parallel'] = (n.lines.s_nom/base_s_nom)[ext_typed_i]
+
+    def update_line_params(n, s_nom_prev):
+        factor = n.lines.s_nom_opt / s_nom_prev
+        for attr, carrier in (('x', 'AC'), ('r', 'DC')):
+            ln_i = (n.lines.query('carrier == @carrier').index & ext_untyped_i)
+            n.lines.loc[ln_i, attr] /= factor[ln_i]
+        ln_i = ext_i & typed_i
+        n.lines.loc[ln_i, 'num_parallel'] = (n.lines.s_nom_opt/base_s_nom)[ln_i]
+
+    def msq_diff(n, s_nom_prev):
+        lines_err = np.sqrt((s_nom_prev - n.lines.s_nom_opt).pow(2).mean()) / \
+                    n.lines['s_nom_opt'].mean()
+        logger.info(f"Mean square difference after iteration {iteration} is "
+                    f"{lines_err}")
+        return lines_err
+
+    iteration = 0
+    diff = msq_threshold
+    while diff >= msq_threshold or iteration < min_iterations:
+        if iteration >= max_iterations:
+            logger.info(f'Iteration {iteration} beyond max_iterations '
+                        f'{max_iterations}. Stopping ...')
+            break
+
+        s_nom_prev = n.lines.s_nom_opt if iteration else n.lines.s_nom
+        kwargs['warmstart'] = bool(iteration and ('basis_fn' in n.__dir__()))
+#        import pdb; pdb.set_trace()
+        network_lopf(n, snapshots, **kwargs)
+        update_line_params(n, s_nom_prev)
+        diff = msq_diff(n, s_nom_prev)
+        iteration += 1
+
diff --git a/pypsa/opt_lm.py b/pypsa/opt_lm.py
new file mode 100644
index 000000000..e02cac4d2
--- /dev/null
+++ b/pypsa/opt_lm.py
@@ -0,0 +1,585 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 7 17:38:10 2019
+
+@author: fabian
+"""
+
+import pandas as pd
+import os, gurobipy, logging, re, io, subprocess
+import numpy as np
+from .descriptors import get_switchable_as_dense as get_as_dense
+from pandas import IndexSlice as idx
+
+lookup = pd.read_csv(os.path.dirname(__file__) + '/variables.csv',
+                     index_col=['component', 'variable'])
+#prefix = lookup.droplevel(1).prefix[lambda ds: ~ds.index.duplicated()]
+nominals = lookup.query('nominal').reset_index(level='variable').variable
+
+# =============================================================================
+# writing functions
+# =============================================================================
+
+xCounter = 0
+cCounter = 0
+def reset_counter():
+    global xCounter, cCounter
+    xCounter, cCounter = 0, 0
+
+
+def write_bound(n, lower, upper, axes=None):
+    """
+    Writer function for writing out multiple variables at a time. If lower
+    and upper are floats, `axes` must be passed as a tuple of
+    (index, columns) or (index,), defining the shape of the variables, which
+    then all share the same bounds.
+    Return a series or frame with variable references.
+    """
+    axes = [axes] if isinstance(axes, pd.Index) else axes
+    if axes is None:
+        axes, shape = broadcasted_axes(lower, upper)
+    else:
+        shape = tuple(map(len, axes))
+    ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series
+    length = np.prod(shape)
+    global xCounter
+    xCounter += length
+    variables = np.array([f'x{x}' for x in range(xCounter - length, xCounter)],
+                         dtype=object).reshape(shape)
+    lower, upper = _str_array(lower), _str_array(upper)
+    for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten():
+        n.bounds_f.write(s)
+    return ser_or_frame(variables, *axes)
+
+def write_constraint(n, lhs, sense, rhs, axes=None):
+    """
+    Writer function for writing out multiple constraints to the corresponding
+    constraints file. If lhs and rhs are numpy.ndarrays, `axes` must not be
+    None but a tuple of (index, columns) or (index,).
+    Return a series or frame with constraint references.
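+
+    Example
+    -------
+    A minimal sketch, assuming ``n.constraints_f`` is an open file handle
+    (as set up by ``prepare_lopf``), the referenced variables exist, and the
+    counters were freshly reset (otherwise the names differ):
+
+    >>> lhs = linexpr((1, pd.Series(['x1', 'x2'])), (-2, pd.Series(['x3', 'x4'])))
+    >>> write_constraint(n, lhs, '<=', pd.Series([10., 20.]))
+    0    c0
+    1    c1
+    dtype: object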
+ """ + axes = [axes] if isinstance(axes, pd.Index) else axes + if axes is None: + axes, shape = broadcasted_axes(lhs, rhs) + else: + shape = tuple(map(len, axes)) + ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series + length = np.prod(shape) + global cCounter + cCounter += length + cons = np.array([f'c{x}' for x in range(cCounter - length, cCounter)], + dtype=object).reshape(shape) + if isinstance(sense, str): + sense = '=' if sense == '==' else sense + lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) + for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten(): + n.constraints_f.write(c) + return ser_or_frame(cons, *axes) + + +# ============================================================================= +# helpers, helper functions +# ============================================================================= + +var_ref_suffix = '_varref' # after solving replace with '_opt' +con_ref_suffix = '_conref' # after solving replace with '' + +def broadcasted_axes(*dfs): + """ + Helper function which, from a collection of arrays, series, frames and other + values, retrieves the axes of series and frames which result from + broadcasting operations. It checks whether index and columns of given + series and frames, repespectively, are aligned. Using this function allows + to subsequently use pure numpy operations and keep the axes in the + background. + """ + axes = [] + shape = () + for df in dfs: + if isinstance(df, (pd.Series, pd.DataFrame)): + if len(axes): + assert (axes[-1] == df.axes[-1]).all(), ('Series or DataFrames ' + 'are not aligned') + axes = df.axes if len(df.axes) > len(axes) else axes + shape = tuple(map(len, axes)) + return axes, shape + + +def linexpr(*tuples, return_axes=False): + """ + Elementwise concatenation of tuples in the form (coefficient, variables). + Coefficient and variables can be arrays, series or frames. Returns + a np.ndarray of strings. If return_axes is set to True and a pd.Series or + pd.DataFrame was past, the corresponding index (and column if existent) is + returned additionaly. + + Parameters + ---------- + tulples: tuple of tuples + Each tuple must of the form (coeff, var), where + * coeff is a numerical value, or a numeical array, series, frame + * var is a str or a array, series, frame of variable strings + return_axes: Boolean, default False + Whether to return index and column (if existent) + + Example + ------- + >>> coeff1 = 1 + >>> var1 = pd.Series(['a1', 'a2', 'a3']) + >>> coeff2 = pd.Series([-0.5, -0.3, -1]) + >>> var2 = pd.Series(['b1', 'b2', 'b3']) + + >>> linexpr((coeff1, var1), (coeff2, var2)) + array(['+1.0 a1\n-0.5 b1\n', '+1.0 a2\n-0.3 b2\n', '+1.0 a3\n-1.0 b3\n'], + dtype=object) + + + For turning the result into a series or frame again: + >>> pd.Series(*linexpr((coeff1, var1), (coeff2, var2), return_axes=True)) + 0 +1.0 a1\n-0.5 b1\n + 1 +1.0 a2\n-0.3 b2\n + 2 +1.0 a3\n-1.0 b3\n + dtype: object + + This can also be applied to DataFrames, using + pd.DataFrame(*linexpr(..., return_axes=True)). 
+ """ + axes, shape = broadcasted_axes(*sum(tuples, ())) + expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object) + if np.prod(shape): + for coeff, var in tuples: + expr += _str_array(coeff) + _str_array(var) + '\n' + if return_axes: + return (expr, *axes) + return expr + + +def _str_array(array): + if isinstance(array, (float, int)): + array = f'+{float(array)} ' if array >= 0 else f'{float(array)} ' + elif isinstance(array, (pd.Series, pd.DataFrame)): + array = array.values + if isinstance(array, np.ndarray): + if not (array.dtype == object) and array.size: + signs = pd.Series(array) if array.ndim == 1 else pd.DataFrame(array) + signs = (signs.pipe(np.sign) + .replace([0, 1, -1], ['+', '+', '-']).values) + array = signs + abs(array).astype(str) + ' ' + return array + + +def join_exprs(df): + """ + Helper function to join arrays, series or frames of stings together. + """ + return ''.join(np.asarray(df).flatten()) + +def expand_series(ser, columns): + """ + Helper function to fastly expand a series to a dataframe with according + column axis and every single column being the equal to the given series. + """ + return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1) + +# ============================================================================= +# 'getter' functions +# ============================================================================= +def get_extendable_i(n, c): + """ + Getter function. Get the index of extendable elements of a given component. + """ + return n.df(c)[lambda ds: + ds[nominals[c] + '_extendable']].index + +def get_non_extendable_i(n, c): + """ + Getter function. Get the index of non-extendable elements of a given + component. + """ + return n.df(c)[lambda ds: + ~ds[nominals[c] + '_extendable']].index + +def get_bounds_pu(n, c, sns, index=slice(None), attr=None): + """ + Getter function to retrieve the per unit bounds of a given compoent for + given snapshots and possible subset of elements (e.g. non-extendables). + Depending on the attr you can further specify the bounds of the variable + you are looking at, e.g. p_store for storage units. + + Parameters + ---------- + n : pypsa.Network + c : string + Component name, e.g. "Generator", "Line". + sns : pandas.Index/pandas.DateTimeIndex + set of snapshots for the bounds + index : pd.Index, default None + Subset of the component elements. If None (default) bounds of all + elements are returned. + attr : string, default None + attribute name for the bounds, e.g. 
"p", "s", "p_store" + + """ + min_pu_str = nominals[c].replace('nom', 'min_pu') + max_pu_str = nominals[c].replace('nom', 'max_pu') + + max_pu = get_as_dense(n, c, max_pu_str, sns) + if c in n.passive_branch_components: + min_pu = - max_pu + elif c == 'StorageUnit': + min_pu = pd.DataFrame(0, max_pu.index, max_pu.columns) + if attr == 'p_store': + max_pu = - get_as_dense(n, c, min_pu_str, sns) + if attr == 'state_of_charge': + max_pu = expand_series(n.df(c).max_hours, sns).T + min_pu = pd.DataFrame(0, *max_pu.axes) + else: + min_pu = get_as_dense(n, c, min_pu_str, sns) + return min_pu[index], max_pu[index] + + +# ============================================================================= +# references to vars and cons, rewrite this part to not store every reference +# ============================================================================= +def _add_reference(n, df, c, attr, suffix, pnl=True): + attr_name = attr + suffix + if pnl: + if attr_name in n.pnl(c): + n.pnl(c)[attr_name][df.columns] = df + else: + n.pnl(c)[attr_name] = df + if n.pnl(c)[attr_name].shape[1] == n.df(c).shape[0]: + n.pnl(c)[attr_name] = n.pnl(c)[attr_name].reindex(columns=n.df(c).index) + else: + n.df(c).loc[df.index, attr_name] = df + +def set_varref(n, variables, c, attr, pnl=True, spec=''): + """ + Sets variable references to the network. + If pnl is False it stores a series of variable names in the static + dataframe of the given component. The columns name is then given by the + attribute name attr and the globally define var_ref_suffix. + If pnl is True if stores the given frame of references in the component + dict of time-depending quantities, e.g. network.generators_t . + """ + if not variables.empty: + if ((c, attr) in n.variables.index) and (spec != ''): + n.variables.at[idx[c, attr], 'specification'] += ', ' + spec + else: + n.variables.loc[idx[c, attr], :] = [pnl, spec] + _add_reference(n, variables, c, attr, var_ref_suffix, pnl=pnl) + +def set_conref(n, constraints, c, attr, pnl=True, spec=''): + """ + Sets constraint references to the network. + If pnl is False it stores a series of constraints names in the static + dataframe of the given component. The columns name is then given by the + attribute name attr and the globally define con_ref_suffix. + If pnl is True if stores the given frame of references in the component + dict of time-depending quantities, e.g. network.generators_t . + """ + if not constraints.empty: + if ((c, attr) in n.constraints.index) and (spec != ''): + n.constraints.at[idx[c, attr], 'specification'] += ', ' + spec + else: + n.constraints.loc[idx[c, attr], :] = [pnl, spec] + _add_reference(n, constraints, c, attr, con_ref_suffix, pnl=pnl) + + +def get_var(n, c, attr, pop=False): + ''' + Retrieves variable references for a given static or time-depending + attribute of a given component. The function looks into n.variables to + detect whether the variable is a time-dependent or static. 
+ + Parameters + ---------- + n : pypsa.Network + c : str + component name to which the constraint belongs + attr: str + attribute name of the constraints + + Example + ------- + get_var(n, 'Generator', 'p') + + ''' + if n.variables.at[idx[c, attr], 'pnl']: + if pop: + return n.pnl(c).pop(attr + var_ref_suffix) + return n.pnl(c)[attr + var_ref_suffix] + else: + if pop: + return n.df(c).pop(attr + var_ref_suffix) + return n.df(c)[attr + var_ref_suffix] + + +def get_con(n, c, attr, pop=False): + """ + Retrieves constraint references for a given static or time-depending + attribute of a give component. + + Parameters + ---------- + n : pypsa.Network + c : str + component name to which the constraint belongs + attr: str + attribute name of the constraints + + Example + ------- + get_con(n, 'Generator', 'mu_upper') + """ + if n.constraints.at[idx[c, attr], 'pnl']: + if pop: + return n.pnl(c).pop(attr + con_ref_suffix) + return n.pnl(c)[attr + con_ref_suffix] + else: + if pop: + return n.df(c).pop(attr + con_ref_suffix) + return n.df(c)[attr + con_ref_suffix] + + +# ============================================================================= +# solvers +# ============================================================================= + +def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + #printingOptions is about what goes in solution file + command = f"cbc -printingOptions all -import {problem_fn} " + if warmstart: + command += f'-basisI {warmstart} ' + if (solver_options is not None) and (solver_options != {}): + command += solver_options + command += f"-solve -solu {solution_fn} " + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + command += f'-basisO {n.basis_fn} ' + + if solver_logfile is None: + os.system(command) + else: + result = subprocess.run(command.split(' '), stdout=subprocess.PIPE) + print(result.stdout.decode('utf-8'), file=open(solver_logfile, 'w')) + + f = open(solution_fn,"r") + data = f.readline() + f.close() + + if data.startswith("Optimal - objective value"): + status = "optimal" + termination_condition = status + objective = float(data[len("Optimal - objective value "):]) + elif "Infeasible" in data: + termination_condition = "infeasible" + else: + termination_condition = "other" + + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + sol = pd.read_csv(solution_fn, header=None, skiprows=[0], + sep=r'\s+', usecols=[1,2,3], index_col=0) + variables_b = sol.index.str[0] == 'x' + variables_sol = sol[variables_b][2] + constraints_dual = sol[~variables_b][3] + + if not keep_files: + os.system("rm "+ problem_fn) + os.system("rm "+ solution_fn) + + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + +def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + # for solver_options lookup https://kam.mff.cuni.cz/~elias/glpk.pdf + command = (f"glpsol --lp {problem_fn} --output {solution_fn}") + if solver_logfile is not None: + command += f' --log {solver_logfile}' + if warmstart: + command += f' --ini {warmstart}' + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + command += f' -w {n.basis_fn}' + if (solver_options is not None) and (solver_options != {}): + command += solver_options + + os.system(command) + + data = open(solution_fn) + info = '' + linebreak = False + while not linebreak: + line = 
data.readline() + linebreak = line == '\n' + info += line + info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] + status = info.Status.lower().strip() + objective = float(re.sub('[^0-9]+', '', info.Objective)) + termination_condition = status + + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + sol = pd.read_fwf(data).set_index('Row name') + variables_b = sol.index.str[0] == 'x' + variables_sol = sol[variables_b]['Activity'].astype(float) + sol = sol[~variables_b] + constraints_b = sol.index.str[0] == 'c' + constraints_dual = (pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') + .fillna(0)) + + if not keep_files: + os.system("rm "+ problem_fn) + os.system("rm "+ solution_fn) + + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + +def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + # for solver options see + # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html + if (solver_logfile is not None) and (solver_options is not None): + solver_options["logfile"] = solver_logfile + logging.disable() + m = gurobipy.read(problem_fn) + + if solver_options is not None: + for key, value in solver_options.items(): + m.setParam(key, value) + + if warmstart: + m.read(warmstart) + + m.optimize() + logging.disable(1) + + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + try: + m.write(n.basis_fn) + except gurobipy.GurobiError: + logging.info('No model basis stored') + del n.basis_fn + + if not keep_files: + os.system("rm "+ problem_fn) + + Status = gurobipy.GRB.Status + statusmap = {getattr(Status, s) : s.lower() for s in Status.__dir__() + if not s.startswith('_')} + status = statusmap[m.status] + termination_condition = status + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()}) + constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()}) + termination_condition = status + objective = m.ObjVal + del m + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + +#From https://github.com/bodono/scs-python +def run_and_read_scs(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + # Follow https://stackoverflow.com/questions/38647230/get-constraints\ + # -in-matrix-format-from-gurobipy + import scipy as sc + from scipy.sparse import csc_matrix as csc + import scs + + m = gurobipy.read(problem_fn) + + dvars = pd.Series(m.getVars()) + obj_coeffs = np.array(m.getAttr('Obj', dvars)) + + constrs = pd.DataFrame({'Con': m.getConstrs()}) + constrs['sense'] = constrs.Con.apply(lambda c: c.Sense).astype('category')\ + .cat.set_categories(['=', '>', '<']) + constrs = constrs.sort_values('sense').reset_index(drop=True) + constrs['sign'] = constrs.sense.replace({'=':1, '>':-1, '<':1}) + constrs['rhs'] = constrs.Con.apply(lambda c: c.RHS) * constrs.sign + + var_indices = {v: i for i, v in enumerate(dvars)} + def get_expr_coos(expr): + for i in range(expr.size()): + dvar = expr.getVar(i) + yield expr.getCoeff(i), var_indices[dvar] + + def get_matrix_coo(m): + for row_idx, (con, sign) in enumerate(constrs[['Con', 'sign']].values): + for coeff, col_idx in get_expr_coos(m.getRow(con)): + yield row_idx, col_idx, sign * coeff + + condata = pd.DataFrame(get_matrix_coo(m), 
columns=['row', 'col', 'coeff']) + A = csc((condata.coeff, (condata.row, condata.col))) + #extend A and rhs by variable bound constraints + ub = dvars.apply(lambda v: v.UB)[lambda x: x!=1e100] + lb = dvars.apply(lambda v: v.LB)[lambda x: x!=-1e100] + A_ub = csc((np.ones(len(ub)), (range(len(ub)), ub.index))) + A_lb = csc((-np.ones(len(lb)), (range(len(lb)), lb.index))) + + A = sc.sparse.vstack((A, A_ub, A_lb)) + b = np.hstack((constrs.rhs, ub, -lb)) + + # initialize and solve + data = {'A': A, 'b': b, 'c': obj_coeffs} + N_eq_con = int((constrs.sense == '=').sum()) + N_in_con = len(b) - N_eq_con + K = {'f': N_eq_con, 'l': N_in_con} + sol = scs.solve(data, K, **solver_options) + + varnames = dvars.apply(lambda v: v.VarName) + connames = constrs.Con.apply(lambda c: c.ConstrName) + variables_sol = pd.Series(sol['x'], varnames) + constraints_dual = - pd.Series(sol['y'][:len(constrs)], connames) + objective = sol['info']['pobj'] + status = sol['info']['statusVal'] +# import pdb; pdb.set_trace() + termination_condition = 'optimal' if status <= 3 else 'non-optimal' + del m + + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + + +#%% +# if warmstart: +# if network is None: +# ValueError('Network must be given to set a warmstart') +# n = network +# for (c, attr), pnl in n.variables.pnl.items(): +# if pnl: +# attr_name = 'p0' if c in n.branch_components else attr +# start = n.pnl(c)[attr_name].stack() +# else: +# if (attr + '_opt') not in n.df(c): +# continue +# start = n.df(c)[attr + '_opt'].dropna() +# var = get_var(n, c, attr) +# var = var.stack().dropna() if pnl else var.dropna() +# var, start = var.align(start, join='inner') +# for v, s in np.column_stack((var.values, start.values)): +# m.getVarByName(v).PStart = s +# for (c, attr), pnl in n.constraints.pnl.items(): +# start = n.pnl(c)[attr].stack() if pnl else n.df(c)[attr].dropna() +# con = get_con(n, c, attr) +# con = con.stack().dropna() if pnl else con.dropna() +# con, start = con.align(start, join='inner') +# for cc, s in np.column_stack((con.values, start.values)): +# m.getConstrByName(cc).DStart = s \ No newline at end of file diff --git a/pypsa/variables.csv b/pypsa/variables.csv new file mode 100644 index 000000000..37873372d --- /dev/null +++ b/pypsa/variables.csv @@ -0,0 +1,18 @@ +component,variable,marginal_cost,nominal,handle_separately +Generator,p,True,False,False +Generator,p_nom,False,True,False +Line,s,False,False,False +Line,s_nom,False,True,False +Transformer,s,False,False,False +Transformer,s_nom,False,True,False +Link,p,True,False,False +Link,p_nom,False,True,False +Store,e,False,False,False +Store,p,True,False,True +Store,e_nom,False,True,False +StorageUnit,p_dispatch,True,False,False +StorageUnit,p_store,False,False,False +StorageUnit,state_of_charge,False,False,False +StorageUnit,p_nom,False,True,False +StorageUnit,spill,False,False,True + diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 21fc9013b..7225918ca 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -17,37 +17,48 @@ from distutils.spawn import find_executable - +from numpy.testing import assert_array_almost_equal as equal def test_lopf(): csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/ac-dc-meshed/ac-dc-data") - network = pypsa.Network(csv_folder_name) + n = pypsa.Network(csv_folder_name) + n.links_t.p_set.drop(columns=n.links.index, inplace=True) + results_folder_name = os.path.join(csv_folder_name,"results-lopf") - network_r = 
pypsa.Network(results_folder_name) + n_r = pypsa.Network(results_folder_name) #test results were generated with GLPK; solution should be unique, #so other solvers should not differ (tested with cbc and gurobi) solver_name = "cbc" - snapshots = network.snapshots + snapshots = n.snapshots for formulation, free_memory in product(["angles", "cycles", "kirchhoff", "ptdf"], [{}, {"pypsa"}]): - network.lopf(snapshots=snapshots,solver_name=solver_name,formulation=formulation, free_memory=free_memory) - print(network.generators_t.p.loc[:,network.generators.index]) - print(network_r.generators_t.p.loc[:,network.generators.index]) - - np.testing.assert_array_almost_equal(network.generators_t.p.loc[:,network.generators.index],network_r.generators_t.p.loc[:,network.generators.index],decimal=4) - - np.testing.assert_array_almost_equal(network.lines_t.p0.loc[:,network.lines.index],network_r.lines_t.p0.loc[:,network.lines.index],decimal=4) - - np.testing.assert_array_almost_equal(network.links_t.p0.loc[:,network.links.index],network_r.links_t.p0.loc[:,network.links.index],decimal=4) + n.lopf(snapshots=snapshots, solver_name=solver_name, + formulation=formulation, free_memory=free_memory) + + equal(n.generators_t.p.loc[:,n.generators.index], + n_r.generators_t.p.loc[:,n.generators.index],decimal=4) + equal(n.lines_t.p0.loc[:,n.lines.index], + n_r.lines_t.p0.loc[:,n.lines.index],decimal=4) + equal(n.links_t.p0.loc[:,n.links.index], + n_r.links_t.p0.loc[:,n.links.index],decimal=4) + + n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) + + equal(n.generators_t.p.loc[:,n.generators.index], + n_r.generators_t.p.loc[:,n.generators.index],decimal=4) + equal(n.lines_t.p0.loc[:,n.lines.index], + n_r.lines_t.p0.loc[:,n.lines.index],decimal=4) + equal(n.links_t.p0.loc[:,n.links.index], + n_r.links_t.p0.loc[:,n.links.index],decimal=4) From 340cab5aff619de5c99f602b117de989bae147ac Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Oct 2019 18:56:44 +0200 Subject: [PATCH 002/111] fix environment --- environment.yaml | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/environment.yaml b/environment.yaml index efcfc81d0..571137d7c 100644 --- a/environment.yaml +++ b/environment.yaml @@ -14,3 +14,4 @@ dependencies: - cartopy>=0.16 - coincbc - glpk + - gurobipy \ No newline at end of file diff --git a/setup.py b/setup.py index 9c248ea1c..88cb1bdce 100644 --- a/setup.py +++ b/setup.py @@ -29,6 +29,7 @@ 'pyomo>=5.3', 'matplotlib', 'networkx>=1.10', + 'gurobipy' ], extras_require = { "cartopy": ['cartopy>=0.16'], From e2599778c5dfa7739013ebb8701ee7eba391d83b Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 12:07:16 +0200 Subject: [PATCH 003/111] opt_lm: fix dict update for python 2.7 --- .travis.yml | 2 +- pypsa/components.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3aa0e6ff7..b95580f0a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ before_install: - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes --set changeps1 no - - conda update -q conda + # - conda update -q conda # Useful for debugging any issues with conda - conda info -a - source $HOME/miniconda/etc/profile.d/conda.sh diff --git a/pypsa/components.py b/pypsa/components.py index d14a558ec..53fe41587 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -484,10 +484,11 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, 'extra_functionality': extra_functionality, 
'extra_postprocessing': extra_postprocessing, 'solver_name': solver_name, 'solver_logfile': solver_logfile} + args.update(kwargs) if pyomo: - return network_lopf(self, **args, **kwargs) + return network_lopf(self, **args) else: - return network_lopf_lm(self, **args, **kwargs) + return network_lopf_lm(self, **args) From 4522d922f7c228135f6b7a653886f68446ae81f8 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 12:13:05 +0200 Subject: [PATCH 004/111] travis.yml revert last change --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index b95580f0a..3aa0e6ff7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ before_install: - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes --set changeps1 no - # - conda update -q conda + - conda update -q conda # Useful for debugging any issues with conda - conda info -a - source $HOME/miniconda/etc/profile.d/conda.sh From d6eb31dd1a6c3423c9c19bb5350d751707104c3b Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 12:28:58 +0200 Subject: [PATCH 005/111] environment: fix gurobi channel --- environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yaml b/environment.yaml index 571137d7c..2e0687a40 100644 --- a/environment.yaml +++ b/environment.yaml @@ -14,4 +14,4 @@ dependencies: - cartopy>=0.16 - coincbc - glpk - - gurobipy \ No newline at end of file + - gurobi::gurobi \ No newline at end of file From 55d4a8285d18d231f1f7dc81af612c5f9e36acb9 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 12:40:31 +0200 Subject: [PATCH 006/111] travis: play around, problem in 'before install' section --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 3aa0e6ff7..b95580f0a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ before_install: - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes --set changeps1 no - - conda update -q conda + # - conda update -q conda # Useful for debugging any issues with conda - conda info -a - source $HOME/miniconda/etc/profile.d/conda.sh From c3c194fd1044e29afe15cfeac5e25e134d762194 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 13:02:23 +0200 Subject: [PATCH 007/111] opt_lm: move gurobipy import in function --- pypsa/opt_lm.py | 4 +++- setup.py | 6 +++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/pypsa/opt_lm.py b/pypsa/opt_lm.py index e02cac4d2..6fa7bc941 100644 --- a/pypsa/opt_lm.py +++ b/pypsa/opt_lm.py @@ -7,11 +7,12 @@ """ import pandas as pd -import os, gurobipy, logging, re, io, subprocess +import os, logging, re, io, subprocess import numpy as np from .descriptors import get_switchable_as_dense as get_as_dense from pandas import IndexSlice as idx + lookup = pd.read_csv(os.path.dirname(__file__) + '/variables.csv', index_col=['component', 'variable']) #prefix = lookup.droplevel(1).prefix[lambda ds: ~ds.index.duplicated()] @@ -447,6 +448,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, solver_options, keep_files, warmstart=None, store_basis=True): + import gurobipy # for solver options see # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html if (solver_logfile is not None) and (solver_options is not None): diff --git a/setup.py b/setup.py index 88cb1bdce..328a263d6 100644 --- a/setup.py +++ b/setup.py @@ -28,12 
+28,12 @@
         'pandas>=0.19.0',
         'pyomo>=5.3',
         'matplotlib',
-        'networkx>=1.10',
-        'gurobipy'
+        'networkx>=1.10'
     ],
     extras_require = {
         "cartopy": ['cartopy>=0.16'],
-        "docs": ["numpydoc", "sphinx", "sphinx_rtd_theme"]
+        "docs": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
+        'gurobipy':['gurobipy']
     },
     classifiers=[
         'Development Status :: 3 - Alpha',

From afa98b2ec8a74ebae5f839202baf7609bce58f97 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Wed, 16 Oct 2019 13:08:17 +0200
Subject: [PATCH 008/111] MANIFEST: add variables.csv

---
 MANIFEST.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/MANIFEST.in b/MANIFEST.in
index 1ee75eeeb..8b187018f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,5 +1,6 @@
 include pypsa/component_attrs/*.csv
 include pypsa/standard_types/*.csv
 include pypsa/components.csv
+include pypsa/variables.csv
 include README.rst LICENSE.txt
 include requirements.yml

From c0c274066f13ca0e85203f9753fce4814fb5b0fc Mon Sep 17 00:00:00 2001
From: Fabian
Date: Wed, 16 Oct 2019 13:52:29 +0200
Subject: [PATCH 009/111] test: make lopf-tests more lightweight, include
 pyomo=False

---
 test/test_ac_dc_lopf.py  | 12 +---------
 test/test_opf_storage.py | 49 ++++++++++------------------------------
 2 files changed, 13 insertions(+), 48 deletions(-)

diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py
index 7225918ca..07d14cbf6 100644
--- a/test/test_ac_dc_lopf.py
+++ b/test/test_ac_dc_lopf.py
@@ -3,20 +3,10 @@

 import pypsa

-import datetime
-import pandas as pd
-
-import networkx as nx
-
-import numpy as np
-
-from itertools import chain, product
+from itertools import product

 import os
-
-
-from distutils.spawn import find_executable

 from numpy.testing import assert_array_almost_equal as equal

diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py
index 62a21df35..fd14336a9 100644
--- a/test/test_opf_storage.py
+++ b/test/test_opf_storage.py
@@ -3,59 +3,34 @@

 import pypsa

-import datetime
 import pandas as pd

-import networkx as nx
-
-import numpy as np
-
-from itertools import chain
+from itertools import product

 import os

+from numpy.testing import assert_array_almost_equal as equal

-from distutils.spawn import find_executable

+def test_opf(pyomo=True):

-def test_opf():
+    csv_folder_name = os.path.join(os.path.dirname(__file__),
+                                   "../examples/opf-storage-hvdc/opf-storage-data")
+    n = pypsa.Network(csv_folder_name)

-    csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/opf-storage-hvdc/opf-storage-data")
+    target_path = os.path.join(csv_folder_name,"results","generators-p.csv")

-    network = pypsa.Network(csv_folder_name)
+    target_gen_p = pd.read_csv(target_path, index_col=0)

     #test results were generated with GLPK and other solvers may differ
-    solver_name = "glpk"
-
-    snapshots = network.snapshots
-
-    network.lopf(snapshots=snapshots,solver_name=solver_name)
-
-
-    results_folder_name = "results"
-
-
-    network.export_to_csv_folder(results_folder_name)
-
-    good_results_filename = os.path.join(csv_folder_name,"results","generators-p.csv")
-
-    good_arr = pd.read_csv(good_results_filename,index_col=0).values
-
-    print(good_arr)
-
-    results_filename = os.path.join(results_folder_name,"generators-p.csv")
-
-
-    arr = pd.read_csv(results_filename,index_col=0).values
-
-
-    print(arr)
-
+    for solver_name, pyomo in product(["cbc", "glpk"], [True, False]):

-    np.testing.assert_array_almost_equal(arr,good_arr)
+        n.lopf(solver_name=solver_name, pyomo=pyomo)
+        equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, decimal=2)


 if __name__ == "__main__":
"__main__": From f0dffdef109b2475b2389fa80b2bb5744cc28620 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 14:04:18 +0200 Subject: [PATCH 010/111] opt_lm: fix logging handling --- pypsa/opt_lm.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pypsa/opt_lm.py b/pypsa/opt_lm.py index 6fa7bc941..cea882998 100644 --- a/pypsa/opt_lm.py +++ b/pypsa/opt_lm.py @@ -453,16 +453,15 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html if (solver_logfile is not None) and (solver_options is not None): solver_options["logfile"] = solver_logfile - logging.disable() - m = gurobipy.read(problem_fn) + # disable logging for this part, as gurobi output is doubled otherwise + logging.disable(50) + m = gurobipy.read(problem_fn) if solver_options is not None: for key, value in solver_options.items(): m.setParam(key, value) - if warmstart: m.read(warmstart) - m.optimize() logging.disable(1) From 771dc946c91e2eebc024d21fcd68ff3ed8985321 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 14:20:04 +0200 Subject: [PATCH 011/111] opf_lm: fi disable logging too disable test for python 2.7 --- .travis.yml | 4 ++-- pypsa/opf_lm.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index b95580f0a..ace811d36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,8 +6,8 @@ sudo: false # Use container-based infrastructure matrix: include: - - env: - - PYTHON_VERSION="2.7" + # - env: + # - PYTHON_VERSION="2.7" - env: - PYTHON_VERSION="3.6" - env: diff --git a/pypsa/opf_lm.py b/pypsa/opf_lm.py index 95cccf5d7..058282230 100644 --- a/pypsa/opf_lm.py +++ b/pypsa/opf_lm.py @@ -557,7 +557,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc", raise NotImplementedError("Only the kirchhoff formulation is supported") #disable logging because multiple slack bus calculations, keep output clean - logging.disable() + logging.disable(50) snapshots = _as_snapshots(n, snapshots) n.calculate_dependent_values() n.determine_network_topology() From a50f43c77c48a356ac91af04d0ed554654b40f1a Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Oct 2019 17:00:48 +0200 Subject: [PATCH 012/111] add solve.py: Start to restructure --- pypsa/__init__.py | 2 +- pypsa/components.py | 6 +- pypsa/{opf_lm.py => opf_lowmemory.py} | 17 +- pypsa/{opt_lm.py => opt_lowmemory.py} | 257 +++-- pypsa/pf.py | 6 +- pypsa/solve.py | 1323 +++++++++++++++++++++++++ 6 files changed, 1506 insertions(+), 105 deletions(-) rename pypsa/{opf_lm.py => opf_lowmemory.py} (98%) rename pypsa/{opt_lm.py => opt_lowmemory.py} (72%) create mode 100644 pypsa/solve.py diff --git a/pypsa/__init__.py b/pypsa/__init__.py index 2d15fbb95..b44764a68 100644 --- a/pypsa/__init__.py +++ b/pypsa/__init__.py @@ -26,7 +26,7 @@ from __future__ import absolute_import from . import components, descriptors -from . import pf, opf, plot, networkclustering, io, contingency, geo, opf_lm, opt_lm +from . 
import (pf, opf, opt, plot, networkclustering, io, contingency, geo, solve) from .components import Network, SubNetwork diff --git a/pypsa/components.py b/pypsa/components.py index 53fe41587..fbb53249f 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -67,7 +67,7 @@ from .opf import network_lopf, network_opf -from .opf_lm import network_lopf as network_lopf_lm +from .solve import network_lopf as network_lopf_lowmem from .plot import plot, iplot @@ -488,7 +488,7 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, if pyomo: return network_lopf(self, **args) else: - return network_lopf_lm(self, **args) + return network_lopf_lowmem(self, **args) @@ -917,6 +917,8 @@ def determine_network_topology(self): for sub in self.sub_networks.obj: find_cycles(sub) + sub.find_bus_controls() + def iterate_components(self, components=None, skip_empty=True): if components is None: diff --git a/pypsa/opf_lm.py b/pypsa/opf_lowmemory.py similarity index 98% rename from pypsa/opf_lm.py rename to pypsa/opf_lowmemory.py index 058282230..7f53e2d0a 100644 --- a/pypsa/opf_lm.py +++ b/pypsa/opf_lowmemory.py @@ -16,15 +16,15 @@ """nomopyomo: build optimisation problems from PyPSA networks without Pyomo. nomopyomo = no more Pyomo.""" -from .opt_lm import (get_bounds_pu, get_extendable_i, - get_non_extendable_i, write_bound, write_constraint, - set_conref, set_varref, get_con, get_var, lookup, - nominals, reset_counter, expand_series, join_exprs, linexpr) +from .opt_lowmemory import (get_bounds_pu, get_extendable_i, linexpr, + get_non_extendable_i, write_bound, write_constraint, + set_conref, set_varref, get_con, get_var, lookup, + nominals, reset_counter, expand_series, join_exprs) from .pf import (find_cycles as find_cycles, _as_snapshots, get_switchable_as_dense as get_as_dense) -from . import opt_lm +from . 
import opt_lowmemory import pandas as pd import numpy as np @@ -179,7 +179,6 @@ def cycle_flow(ds): constraints = [] for sub in n.sub_networks.obj: - find_cycles(sub) C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) if C.empty: continue @@ -557,13 +556,9 @@ def network_lopf(n, snapshots=None, solver_name="cbc", raise NotImplementedError("Only the kirchhoff formulation is supported") #disable logging because multiple slack bus calculations, keep output clean - logging.disable(50) snapshots = _as_snapshots(n, snapshots) n.calculate_dependent_values() n.determine_network_topology() - for sub in n.sub_networks.obj: - sub.find_bus_controls() - logging.disable(1) if solver_logfile is None: solver_logfile = "test.log" @@ -579,7 +574,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc", else: logger.info("Solve linear problem") - solve = getattr(opt_lm, f'run_and_read_{solver_name}') + solve = getattr(opt_lowmemory, f'run_and_read_{solver_name}') res = solve(n, n.problem_fn, solution_fn, solver_logfile, solver_options, keep_files, warmstart, store_basis) status, termination_condition, variables_sol, constraints_dual, obj = res diff --git a/pypsa/opt_lm.py b/pypsa/opt_lowmemory.py similarity index 72% rename from pypsa/opt_lm.py rename to pypsa/opt_lowmemory.py index cea882998..fad06de90 100644 --- a/pypsa/opt_lm.py +++ b/pypsa/opt_lowmemory.py @@ -13,9 +13,8 @@ from pandas import IndexSlice as idx -lookup = pd.read_csv(os.path.dirname(__file__) + '/variables.csv', - index_col=['component', 'variable']) -#prefix = lookup.droplevel(1).prefix[lambda ds: ~ds.index.duplicated()] +lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'), + index_col=['component', 'variable']) nominals = lookup.query('nominal').reset_index(level='variable').variable # ============================================================================= @@ -493,94 +492,176 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, constraints_dual, objective) -#From https://github.com/bodono/scs-python -def run_and_read_scs(n, problem_fn, solution_fn, solver_logfile, - solver_options, keep_files, warmstart=None, - store_basis=True): - # Follow https://stackoverflow.com/questions/38647230/get-constraints\ - # -in-matrix-format-from-gurobipy - import scipy as sc - from scipy.sparse import csc_matrix as csc - import scs +# ============================================================================= +# test/double-check constraints +# ============================================================================= - m = gurobipy.read(problem_fn) - dvars = pd.Series(m.getVars()) - obj_coeffs = np.array(m.getAttr('Obj', dvars)) - - constrs = pd.DataFrame({'Con': m.getConstrs()}) - constrs['sense'] = constrs.Con.apply(lambda c: c.Sense).astype('category')\ - .cat.set_categories(['=', '>', '<']) - constrs = constrs.sort_values('sense').reset_index(drop=True) - constrs['sign'] = constrs.sense.replace({'=':1, '>':-1, '<':1}) - constrs['rhs'] = constrs.Con.apply(lambda c: c.RHS) * constrs.sign - - var_indices = {v: i for i, v in enumerate(dvars)} - def get_expr_coos(expr): - for i in range(expr.size()): - dvar = expr.getVar(i) - yield expr.getCoeff(i), var_indices[dvar] - - def get_matrix_coo(m): - for row_idx, (con, sign) in enumerate(constrs[['Con', 'sign']].values): - for coeff, col_idx in get_expr_coos(m.getRow(con)): - yield row_idx, col_idx, sign * coeff - - condata = pd.DataFrame(get_matrix_coo(m), columns=['row', 'col', 'coeff']) - A = csc((condata.coeff, (condata.row, condata.col))) - 
#extend A and rhs by variable bound constraints - ub = dvars.apply(lambda v: v.UB)[lambda x: x!=1e100] - lb = dvars.apply(lambda v: v.LB)[lambda x: x!=-1e100] - A_ub = csc((np.ones(len(ub)), (range(len(ub)), ub.index))) - A_lb = csc((-np.ones(len(lb)), (range(len(lb)), lb.index))) - - A = sc.sparse.vstack((A, A_ub, A_lb)) - b = np.hstack((constrs.rhs, ub, -lb)) - - # initialize and solve - data = {'A': A, 'b': b, 'c': obj_coeffs} - N_eq_con = int((constrs.sense == '=').sum()) - N_in_con = len(b) - N_eq_con - K = {'f': N_eq_con, 'l': N_in_con} - sol = scs.solve(data, K, **solver_options) - - varnames = dvars.apply(lambda v: v.VarName) - connames = constrs.Con.apply(lambda c: c.ConstrName) - variables_sol = pd.Series(sol['x'], varnames) - constraints_dual = - pd.Series(sol['y'][:len(constrs)], connames) - objective = sol['info']['pobj'] - status = sol['info']['statusVal'] -# import pdb; pdb.set_trace() - termination_condition = 'optimal' if status <= 3 else 'non-optimal' - del m +def describe_storage_unit_contraints(n): + """ + Checks whether all storage units are balanced over time. This function + requires the network to contain the separate variables p_store and + p_dispatch, since they cannot be reconstructed from p. The latter results + from times tau where p_store(tau) > 0 **and** p_dispatch(tau) > 0, which + is allowed (even though not economic). Therefor p_store is necessarily + equal to negative entries of p, vice versa for p_dispatch. + """ + sus = n.storage_units + sus_i = sus.index + if sus_i.empty: return + sns = n.snapshots + c = 'StorageUnit' + pnl = n.pnl(c) + + description = {} + + eh = expand_series(n.snapshot_weightings, sus_i) + stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + dispatch_eff = expand_series(n.df(c).efficiency_dispatch, sns).T + store_eff = expand_series(n.df(c).efficiency_store, sns).T + inflow = get_as_dense(n, c, 'inflow') * eh + spill = eh[pnl.spill.columns] * pnl.spill + + description['Spillage Limit'] = pd.Series({'min': + (inflow[spill.columns] - spill).min().min()}) + + if 'p_store' in pnl: + soc = pnl.state_of_charge + + store = store_eff * eh * pnl.p_store#.clip(upper=0) + dispatch = 1/dispatch_eff * eh * pnl.p_dispatch#(lower=0) + start = soc.iloc[-1].where(sus.cyclic_state_of_charge, + sus.state_of_charge_initial) + previous_soc = stand_eff * soc.shift().fillna(start) + + + reconstructed = (previous_soc.add(store, fill_value=0) + .add(inflow, fill_value=0) + .add(-dispatch, fill_value=0) + .add(-spill, fill_value=0)) + description['SOC Balance StorageUnit'] = ((reconstructed - soc) + .unstack().describe()) + else: + logging.info('Storage Unit SOC balance not reconstructable as no ' + 'p_store and p_dispatch in n.storage_units_t.') + return pd.concat(description, axis=1, sort=False) + + +def describe_nodal_balance_constraint(n): + """ + Helper function to double check whether network flow is balanced + """ + network_injection = pd.concat( + [n.pnl(c)[f'p{inout}'].rename(columns=n.df(c)[f'bus{inout}']) + for inout in (0, 1) for c in ('Line', 'Transformer')], axis=1)\ + .groupby(level=0, axis=1).sum() + return (n.buses_t.p - network_injection).unstack().describe()\ + .to_frame('Nodal Balance Constr.') + +def describe_upper_dispatch_constraints(n): + ''' + Recalculates the minimum gap between operational status and nominal capacity + ''' + description = {} + key = ' Upper Limit' + for c, attr in nominals.items(): + dispatch_attr = 'p0' if c in ['Line', 'Transformer', 'Link'] else attr[0] + description[c + key] = pd.Series({'min': + 
(n.df(c)[attr + '_opt'] *
+                              get_as_dense(n, c, attr[0] + '_max_pu') -
+                              n.pnl(c)[dispatch_attr]).min().min()})
+    return pd.concat(description, axis=1)
+
+
+def describe_lower_dispatch_constraints(n):
+    description = {}
+    key = ' Lower Limit'
+    for c, attr in nominals.items():
+        if c in ['Line', 'Transformer', 'Link']:
+            dispatch_attr = 'p0'
+            description[c] = pd.Series({'min':
+                              (n.df(c)[attr + '_opt'] *
+                              get_as_dense(n, c, attr[0] + '_max_pu') +
+                              n.pnl(c)[dispatch_attr]).min().min()})
+        else:
+            dispatch_attr = attr[0]
+            description[c + key] = pd.Series({'min':
+                                   (-n.df(c)[attr + '_opt'] *
+                                   get_as_dense(n, c, attr[0] + '_min_pu') +
+                                   n.pnl(c)[dispatch_attr]).min().min()})
+    return pd.concat(description, axis=1)
+
+
+def describe_store_contraints(n):
+    """
+    Checks whether all stores are balanced over time.
+    """
+    stores = n.stores
+    stores_i = stores.index
+    if stores_i.empty: return
+    sns = n.snapshots
+    c = 'Store'
+    pnl = n.pnl(c)
+
+    eh = expand_series(n.snapshot_weightings, stores_i)
+    stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
+
+    start = pnl.e.iloc[-1].where(stores.e_cyclic, stores.e_initial)
+    previous_e = stand_eff * pnl.e.shift().fillna(start)
+
+    return (previous_e - pnl.p - pnl.e).unstack().describe()\
+            .to_frame('SOC Balance Store')
+
+
+def describe_cycle_constraints(n):
+    weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff)
+
+    def cycle_flow(sub):
+        C = pd.DataFrame(sub.C.todense(), index=sub.lines_i())
+        if C.empty:
+            return None
+        C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0)
+        return C_weighted.apply(lambda ds: ds @ n.lines_t.p0[ds.index].T)
+
+    return pd.concat([cycle_flow(sub) for sub in n.sub_networks.obj], axis=0)\
+            .unstack().describe().to_frame('Cycle Constr.')
+

-    return (status, termination_condition, variables_sol,
-            constraints_dual, objective)

+def constraint_stats(n, round_digit=1e-30):
+    """
+    Post-optimization function to recalculate gap statistics of different
+    constraints. For inequality constraints only the minimum of lhs - rhs,
+    with lhs >= rhs, is returned.
+    """
+    return pd.concat([describe_cycle_constraints(n),
+                      describe_store_contraints(n),
+                      describe_storage_unit_contraints(n),
+                      describe_nodal_balance_constraint(n),
+                      describe_lower_dispatch_constraints(n),
+                      describe_upper_dispatch_constraints(n)],
+                     axis=1, sort=False)
+
+def check_constraints(n, tol=1e-3):
+    """
+    Post-optimization test function to double-check most of the lopf
+    constraints. For relevant equality constraints, it tests whether the
+    deviation between lhs and rhs is below the given tolerance. For
+    inequality constraints, it tests whether the inequality is violated by
+    more than the tolerance.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    tol : float
+        Gap tolerance
+
+    Raises an AssertionError if the tolerance is exceeded.
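+
+    Example
+    -------
+    A minimal usage sketch (assuming n has been solved with pyomo=False, so
+    that the p_store and p_dispatch results are available):
+
+    >>> n.lopf(pyomo=False)
+    >>> check_constraints(n, tol=1e-3)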
+ + """ + stats = constraint_stats(n).rename(index=str.title) + condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T + assert condition.empty, (f'The following constraint(s) are exceeding the ' + f'given tolerance of {tol}: \n{condition}') -#%% -# if warmstart: -# if network is None: -# ValueError('Network must be given to set a warmstart') -# n = network -# for (c, attr), pnl in n.variables.pnl.items(): -# if pnl: -# attr_name = 'p0' if c in n.branch_components else attr -# start = n.pnl(c)[attr_name].stack() -# else: -# if (attr + '_opt') not in n.df(c): -# continue -# start = n.df(c)[attr + '_opt'].dropna() -# var = get_var(n, c, attr) -# var = var.stack().dropna() if pnl else var.dropna() -# var, start = var.align(start, join='inner') -# for v, s in np.column_stack((var.values, start.values)): -# m.getVarByName(v).PStart = s -# for (c, attr), pnl in n.constraints.pnl.items(): -# start = n.pnl(c)[attr].stack() if pnl else n.df(c)[attr].dropna() -# con = get_con(n, c, attr) -# con = con.stack().dropna() if pnl else con.dropna() -# con, start = con.align(start, join='inner') -# for cc, s in np.column_stack((con.values, start.values)): -# m.getConstrByName(cc).DStart = s \ No newline at end of file diff --git a/pypsa/pf.py b/pypsa/pf.py index d326065c3..2f6a1281f 100644 --- a/pypsa/pf.py +++ b/pypsa/pf.py @@ -549,7 +549,7 @@ def find_slack_bus(sub_network): gens = sub_network.generators() if len(gens) == 0: - logger.warning("No generators in sub-network {}, better hope power is already balanced".format(sub_network.name)) +# logger.warning("No generators in sub-network {}, better hope power is already balanced".format(sub_network.name)) sub_network.slack_generator = None sub_network.slack_bus = sub_network.buses_i()[0] @@ -574,7 +574,7 @@ def find_slack_bus(sub_network): #also put it into the dataframe sub_network.network.sub_networks.at[sub_network.name,"slack_bus"] = sub_network.slack_bus - logger.info("Slack bus for sub-network {} is {}".format(sub_network.name, sub_network.slack_bus)) +# logger.info("Slack bus for sub-network {} is {}".format(sub_network.name, sub_network.slack_bus)) def find_bus_controls(sub_network): @@ -833,7 +833,7 @@ def find_tree(sub_network, weight='x_pu'): #find bus with highest degree to use as slack tree_slack_bus, slack_degree = max(degree(sub_network.tree), key=itemgetter(1)) - logger.info("Tree slack bus is %s with degree %d.", tree_slack_bus, slack_degree) +# logger.info("Tree slack bus is %s with degree %d.", tree_slack_bus, slack_degree) #determine which buses are supplied in tree through branch from slack diff --git a/pypsa/solve.py b/pypsa/solve.py new file mode 100644 index 000000000..4995db817 --- /dev/null +++ b/pypsa/solve.py @@ -0,0 +1,1323 @@ +## Copyright 2019 Tom Brown (KIT), Fabian Hofmann (FIAS) + +## This program is free software; you can redistribute it and/or +## modify it under the terms of the GNU General Public License as +## published by the Free Software Foundation; either version 3 of the +## License, or (at your option) any later version. + +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. + +## You should have received a copy of the GNU General Public License +## along with this program. If not, see . + +"""nomopyomo: build optimisation problems from PyPSA networks without +Pyomo. 
nomopyomo = no more Pyomo."""
+
+from .descriptors import get_switchable_as_dense as get_as_dense
+from .pf import _as_snapshots
+
+import pandas as pd
+from pandas import IndexSlice as idx
+import numpy as np
+
+import gc, string, random, time, os, re, subprocess, io
+
+import logging
+logger = logging.getLogger(__name__)
+
+
+lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'),
+                     index_col=['component', 'variable'])
+nominals = lookup.query('nominal').reset_index(level='variable').variable
+
+# =============================================================================
+# writing functions
+# =============================================================================
+
+xCounter = 0
+cCounter = 0
+def reset_counter():
+    global xCounter, cCounter
+    xCounter, cCounter = 0, 0
+
+
+def write_bound(n, lower, upper, axes=None):
+    """
+    Writer function for writing out multiple variables at a time. If lower
+    and upper are floats, axes must be passed as a tuple of (index, columns)
+    or (index,), which is used to create variables with the same lower and
+    upper bounds.
+    Return a series or frame with variable references.
+    """
+    axes = [axes] if isinstance(axes, pd.Index) else axes
+    if axes is None:
+        axes, shape = broadcasted_axes(lower, upper)
+    else:
+        shape = tuple(map(len, axes))
+    ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series
+    length = np.prod(shape)
+    global xCounter
+    xCounter += length
+    variables = np.array([f'x{x}' for x in range(xCounter - length, xCounter)],
+                         dtype=object).reshape(shape)
+    lower, upper = _str_array(lower), _str_array(upper)
+    for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten():
+        n.bounds_f.write(s)
+    return ser_or_frame(variables, *axes)
+
+def write_constraint(n, lhs, sense, rhs, axes=None):
+    """
+    Writer function for writing out multiple constraints to the corresponding
+    constraints file. If lhs and rhs are numpy.ndarrays, axes must not be
+    None but a tuple of (index, columns) or (index,).
+    Return a series or frame with constraint references.
+    """
+    axes = [axes] if isinstance(axes, pd.Index) else axes
+    if axes is None:
+        axes, shape = broadcasted_axes(lhs, rhs)
+    else:
+        shape = tuple(map(len, axes))
+    ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series
+    length = np.prod(shape)
+    global cCounter
+    cCounter += length
+    cons = np.array([f'c{x}' for x in range(cCounter - length, cCounter)],
+                    dtype=object).reshape(shape)
+    if isinstance(sense, str):
+        sense = '=' if sense == '==' else sense
+    lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs)
+    for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten():
+        n.constraints_f.write(c)
+    return ser_or_frame(cons, *axes)
+
+
+# =============================================================================
+# helpers, helper functions
+# =============================================================================
+
+var_ref_suffix = '_varref' # after solving replace with '_opt'
+con_ref_suffix = '_conref' # after solving replace with ''
+
+def broadcasted_axes(*dfs):
+    """
+    Helper function which, from a collection of arrays, series, frames and
+    other values, retrieves the axes of series and frames which result from
+    broadcasting operations. It checks whether index and columns of given
+    series and frames, respectively, are aligned. Using this function allows
+    to subsequently use pure numpy operations and keep the axes in the
+    background.
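+
+    Example
+    -------
+    A small sketch of the return value (hypothetical data):
+
+    >>> df = pd.DataFrame(np.zeros((2, 3)), index=['t0', 't1'],
+    ...                   columns=['a', 'b', 'c'])
+    >>> axes, shape = broadcasted_axes(1.0, df)
+    >>> shape
+    (2, 3)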
+ """ + axes = [] + shape = () + for df in dfs: + if isinstance(df, (pd.Series, pd.DataFrame)): + if len(axes): + assert (axes[-1] == df.axes[-1]).all(), ('Series or DataFrames ' + 'are not aligned') + axes = df.axes if len(df.axes) > len(axes) else axes + shape = tuple(map(len, axes)) + return axes, shape + + +def linexpr(*tuples, return_axes=False): + """ + Elementwise concatenation of tuples in the form (coefficient, variables). + Coefficient and variables can be arrays, series or frames. Returns + a np.ndarray of strings. If return_axes is set to True and a pd.Series or + pd.DataFrame was past, the corresponding index (and column if existent) is + returned additionaly. + + Parameters + ---------- + tulples: tuple of tuples + Each tuple must of the form (coeff, var), where + * coeff is a numerical value, or a numeical array, series, frame + * var is a str or a array, series, frame of variable strings + return_axes: Boolean, default False + Whether to return index and column (if existent) + + Example + ------- + >>> coeff1 = 1 + >>> var1 = pd.Series(['a1', 'a2', 'a3']) + >>> coeff2 = pd.Series([-0.5, -0.3, -1]) + >>> var2 = pd.Series(['b1', 'b2', 'b3']) + + >>> linexpr((coeff1, var1), (coeff2, var2)) + array(['+1.0 a1\n-0.5 b1\n', '+1.0 a2\n-0.3 b2\n', '+1.0 a3\n-1.0 b3\n'], + dtype=object) + + + For turning the result into a series or frame again: + >>> pd.Series(*linexpr((coeff1, var1), (coeff2, var2), return_axes=True)) + 0 +1.0 a1\n-0.5 b1\n + 1 +1.0 a2\n-0.3 b2\n + 2 +1.0 a3\n-1.0 b3\n + dtype: object + + This can also be applied to DataFrames, using + pd.DataFrame(*linexpr(..., return_axes=True)). + """ + axes, shape = broadcasted_axes(*sum(tuples, ())) + expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object) + if np.prod(shape): + for coeff, var in tuples: + expr += _str_array(coeff) + _str_array(var) + '\n' + if return_axes: + return (expr, *axes) + return expr + + +def _str_array(array): + if isinstance(array, (float, int)): + array = f'+{float(array)} ' if array >= 0 else f'{float(array)} ' + elif isinstance(array, (pd.Series, pd.DataFrame)): + array = array.values + if isinstance(array, np.ndarray): + if not (array.dtype == object) and array.size: + signs = pd.Series(array) if array.ndim == 1 else pd.DataFrame(array) + signs = (signs.pipe(np.sign) + .replace([0, 1, -1], ['+', '+', '-']).values) + array = signs + abs(array).astype(str) + ' ' + return array + + +def join_exprs(df): + """ + Helper function to join arrays, series or frames of stings together. + """ + return ''.join(np.asarray(df).flatten()) + +def expand_series(ser, columns): + """ + Helper function to fastly expand a series to a dataframe with according + column axis and every single column being the equal to the given series. + """ + return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1) + +# ============================================================================= +# 'getter' functions +# ============================================================================= + +def get_extendable_i(n, c): + """ + Getter function. Get the index of extendable elements of a given component. + """ + return n.df(c)[lambda ds: + ds[nominals[c] + '_extendable']].index + +def get_non_extendable_i(n, c): + """ + Getter function. Get the index of non-extendable elements of a given + component. 
+ """ + return n.df(c)[lambda ds: + ~ds[nominals[c] + '_extendable']].index + +def get_bounds_pu(n, c, sns, index=slice(None), attr=None): + """ + Getter function to retrieve the per unit bounds of a given compoent for + given snapshots and possible subset of elements (e.g. non-extendables). + Depending on the attr you can further specify the bounds of the variable + you are looking at, e.g. p_store for storage units. + + Parameters + ---------- + n : pypsa.Network + c : string + Component name, e.g. "Generator", "Line". + sns : pandas.Index/pandas.DateTimeIndex + set of snapshots for the bounds + index : pd.Index, default None + Subset of the component elements. If None (default) bounds of all + elements are returned. + attr : string, default None + attribute name for the bounds, e.g. "p", "s", "p_store" + + """ + min_pu_str = nominals[c].replace('nom', 'min_pu') + max_pu_str = nominals[c].replace('nom', 'max_pu') + + max_pu = get_as_dense(n, c, max_pu_str, sns) + if c in n.passive_branch_components: + min_pu = - max_pu + elif c == 'StorageUnit': + min_pu = pd.DataFrame(0, max_pu.index, max_pu.columns) + if attr == 'p_store': + max_pu = - get_as_dense(n, c, min_pu_str, sns) + if attr == 'state_of_charge': + max_pu = expand_series(n.df(c).max_hours, sns).T + min_pu = pd.DataFrame(0, *max_pu.axes) + else: + min_pu = get_as_dense(n, c, min_pu_str, sns) + return min_pu[index], max_pu[index] + + +# ============================================================================= +# references to vars and cons, rewrite this part to not store every reference +# ============================================================================= + +def _add_reference(n, df, c, attr, suffix, pnl=True): + attr_name = attr + suffix + if pnl: + if attr_name in n.pnl(c): + n.pnl(c)[attr_name][df.columns] = df + else: + n.pnl(c)[attr_name] = df + if n.pnl(c)[attr_name].shape[1] == n.df(c).shape[0]: + n.pnl(c)[attr_name] = n.pnl(c)[attr_name].reindex(columns=n.df(c).index) + else: + n.df(c).loc[df.index, attr_name] = df + +def set_varref(n, variables, c, attr, pnl=True, spec=''): + """ + Sets variable references to the network. + If pnl is False it stores a series of variable names in the static + dataframe of the given component. The columns name is then given by the + attribute name attr and the globally define var_ref_suffix. + If pnl is True if stores the given frame of references in the component + dict of time-depending quantities, e.g. network.generators_t . + """ + if not variables.empty: + if ((c, attr) in n.variables.index) and (spec != ''): + n.variables.at[idx[c, attr], 'specification'] += ', ' + spec + else: + n.variables.loc[idx[c, attr], :] = [pnl, spec] + _add_reference(n, variables, c, attr, var_ref_suffix, pnl=pnl) + +def set_conref(n, constraints, c, attr, pnl=True, spec=''): + """ + Sets constraint references to the network. + If pnl is False it stores a series of constraints names in the static + dataframe of the given component. The columns name is then given by the + attribute name attr and the globally define con_ref_suffix. + If pnl is True if stores the given frame of references in the component + dict of time-depending quantities, e.g. network.generators_t . 
+ """ + if not constraints.empty: + if ((c, attr) in n.constraints.index) and (spec != ''): + n.constraints.at[idx[c, attr], 'specification'] += ', ' + spec + else: + n.constraints.loc[idx[c, attr], :] = [pnl, spec] + _add_reference(n, constraints, c, attr, con_ref_suffix, pnl=pnl) + + +def get_var(n, c, attr, pop=False): + ''' + Retrieves variable references for a given static or time-depending + attribute of a given component. The function looks into n.variables to + detect whether the variable is a time-dependent or static. + + Parameters + ---------- + n : pypsa.Network + c : str + component name to which the constraint belongs + attr: str + attribute name of the constraints + + Example + ------- + get_var(n, 'Generator', 'p') + + ''' + if n.variables.at[idx[c, attr], 'pnl']: + if pop: + return n.pnl(c).pop(attr + var_ref_suffix) + return n.pnl(c)[attr + var_ref_suffix] + else: + if pop: + return n.df(c).pop(attr + var_ref_suffix) + return n.df(c)[attr + var_ref_suffix] + + +def get_con(n, c, attr, pop=False): + """ + Retrieves constraint references for a given static or time-depending + attribute of a give component. + + Parameters + ---------- + n : pypsa.Network + c : str + component name to which the constraint belongs + attr: str + attribute name of the constraints + + Example + ------- + get_con(n, 'Generator', 'mu_upper') + """ + if n.constraints.at[idx[c, attr], 'pnl']: + if pop: + return n.pnl(c).pop(attr + con_ref_suffix) + return n.pnl(c)[attr + con_ref_suffix] + else: + if pop: + return n.df(c).pop(attr + con_ref_suffix) + return n.df(c)[attr + con_ref_suffix] + + +# ============================================================================= +# solvers +# ============================================================================= + +def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + #printingOptions is about what goes in solution file + command = f"cbc -printingOptions all -import {problem_fn} " + if warmstart: + command += f'-basisI {warmstart} ' + if (solver_options is not None) and (solver_options != {}): + command += solver_options + command += f"-solve -solu {solution_fn} " + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + command += f'-basisO {n.basis_fn} ' + + if solver_logfile is None: + os.system(command) + else: + result = subprocess.run(command.split(' '), stdout=subprocess.PIPE) + print(result.stdout.decode('utf-8'), file=open(solver_logfile, 'w')) + + f = open(solution_fn,"r") + data = f.readline() + f.close() + + if data.startswith("Optimal - objective value"): + status = "optimal" + termination_condition = status + objective = float(data[len("Optimal - objective value "):]) + elif "Infeasible" in data: + termination_condition = "infeasible" + else: + termination_condition = "other" + + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + sol = pd.read_csv(solution_fn, header=None, skiprows=[0], + sep=r'\s+', usecols=[1,2,3], index_col=0) + variables_b = sol.index.str[0] == 'x' + variables_sol = sol[variables_b][2] + constraints_dual = sol[~variables_b][3] + + if not keep_files: + os.system("rm "+ problem_fn) + os.system("rm "+ solution_fn) + + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + +def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + # for solver_options lookup 
https://kam.mff.cuni.cz/~elias/glpk.pdf + command = (f"glpsol --lp {problem_fn} --output {solution_fn}") + if solver_logfile is not None: + command += f' --log {solver_logfile}' + if warmstart: + command += f' --ini {warmstart}' + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + command += f' -w {n.basis_fn}' + if (solver_options is not None) and (solver_options != {}): + command += solver_options + + os.system(command) + + data = open(solution_fn) + info = '' + linebreak = False + while not linebreak: + line = data.readline() + linebreak = line == '\n' + info += line + info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] + status = info.Status.lower().strip() + objective = float(re.sub('[^0-9]+', '', info.Objective)) + termination_condition = status + + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + sol = pd.read_fwf(data).set_index('Row name') + variables_b = sol.index.str[0] == 'x' + variables_sol = sol[variables_b]['Activity'].astype(float) + sol = sol[~variables_b] + constraints_b = sol.index.str[0] == 'c' + constraints_dual = (pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') + .fillna(0)) + + if not keep_files: + os.system("rm "+ problem_fn) + os.system("rm "+ solution_fn) + + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + +def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, + solver_options, keep_files, warmstart=None, + store_basis=True): + import gurobipy + # for solver options see + # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html + if (solver_logfile is not None) and (solver_options is not None): + solver_options["logfile"] = solver_logfile + + # disable logging for this part, as gurobi output is doubled otherwise + logging.disable(50) + m = gurobipy.read(problem_fn) + if solver_options is not None: + for key, value in solver_options.items(): + m.setParam(key, value) + if warmstart: + m.read(warmstart) + m.optimize() + logging.disable(1) + + if store_basis: + n.basis_fn = solution_fn.replace('.sol', '.bas') + try: + m.write(n.basis_fn) + except gurobipy.GurobiError: + logging.info('No model basis stored') + del n.basis_fn + + if not keep_files: + os.system("rm "+ problem_fn) + + Status = gurobipy.GRB.Status + statusmap = {getattr(Status, s) : s.lower() for s in Status.__dir__() + if not s.startswith('_')} + status = statusmap[m.status] + termination_condition = status + if termination_condition != "optimal": + return status, termination_condition, None, None, None + + variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()}) + constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()}) + termination_condition = status + objective = m.ObjVal + del m + return (status, termination_condition, variables_sol, + constraints_dual, objective) + + + +# ============================================================================= +# Setting up the problem +# ============================================================================= + +def define_nominal_for_extendable_variables(n, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + lower = n.df(c)[attr+'_min'][ext_i] + upper = n.df(c)[attr+'_max'][ext_i] + variables = write_bound(n, lower, upper) + set_varref(n, variables, c, attr, pnl=False) + + +def define_dispatch_for_extendable_variables(n, sns, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + variables = write_bound(n, -np.inf, np.inf, 
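+                            # dispatch of extendables is unbounded here; the
+                            # effective limits min_pu*nom <= dispatch <= max_pu*nom
+                            # are enforced against the nominal variable in
+                            # define_dispatch_for_extendable_constraints below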
axes=[sns, ext_i]) + set_varref(n, variables, c, attr, pnl=True, spec='extendables') + + +def define_dispatch_for_non_extendable_variables(n, sns, c, attr): + fix_i = get_non_extendable_i(n, c) + if fix_i.empty: return + nominal_fix = n.df(c)[nominals.at[c]][fix_i] + min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) + lower = min_pu.mul(nominal_fix) + upper = max_pu.mul(nominal_fix) + variables = write_bound(n, lower, upper) + set_varref(n, variables, c, attr, pnl=True, spec='nonextendables') + + +def define_dispatch_for_extendable_constraints(n, sns, c, attr): + ext_i = get_extendable_i(n, c) + if ext_i.empty: return + min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr) + operational_ext_v = get_var(n, c, attr)[ext_i] + nominal_v = get_var(n, c, nominals.at[c])[ext_i] + rhs = 0 + + lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v), + return_axes=True) + constraints = write_constraint(n, lhs, '>=', rhs, axes) + set_conref(n, constraints, c, 'mu_upper', pnl=True, spec=attr) + + lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v), + return_axes=True) + constraints = write_constraint(n, lhs, '<=', rhs, axes) + set_conref(n, constraints, c, 'mu_lower', pnl=True, spec=attr) + + +def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True): + if pnl: + if attr + '_set' not in n.pnl(c): return + fix = n.pnl(c)[attr + '_set'].unstack().dropna() + if fix.empty: return + lhs = linexpr((1, get_var(n, c, attr).unstack()[fix.index])) + constraints = write_constraint(n, lhs, '=', fix).unstack().T + else: + if attr + '_set' not in n.df(c): return + fix = n.df(c)[attr + '_set'].dropna() + if fix.empty: return + lhs = linexpr((1, get_var(n, c, attr)[fix.index])) + constraints = write_constraint(n, lhs, '=', fix) + set_conref(n, constraints, c, f'mu_{attr}_set', pnl) + + +def define_ramp_limit_constraints(n, sns): + c = 'Generator' + rup_i = n.df(c).query('ramp_limit_up == ramp_limit_up').index + rdown_i = n.df(c).query('ramp_limit_down == ramp_limit_down').index + if rup_i.empty & rdown_i.empty: + return + p = get_var(n, c, 'p').loc[sns[1:]] + p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]] + + #fix up + gens_i = rup_i & get_non_extendable_i(n, c) + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + return_axes=True)) + rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') + constraints = write_constraint(n, lhs, '<=', rhs) + set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='nonextendables') + + #ext up + gens_i = rup_i & get_extendable_i(n, c) + limit_pu = n.df(c)['ramp_limit_up'][gens_i] + p_nom = get_var(n, c, 'p_nom')[gens_i] + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (-limit_pu, p_nom), return_axes=True)) + constraints = write_constraint(n, lhs, '<=', 0) + set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='extendables') + + #fix down + gens_i = rdown_i & get_non_extendable_i(n, c) + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + return_axes=True)) + rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') + constraints = write_constraint(n, lhs, '>=', rhs) + set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='nonextendables') + + #ext down + gens_i = rdown_i & get_extendable_i(n, c) + limit_pu = n.df(c)['ramp_limit_down'][gens_i] + p_nom = get_var(n, c, 'p_nom')[gens_i] + lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_pu, p_nom), return_axes=True)) + constraints = write_constraint(n, lhs, '>=', 0) + set_conref(n, constraints, c, 
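+               # the constraint above couples dispatch and capacity:
+               # p[t] - p[t-1] + ramp_limit_down * p_nom >= 0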
'mu_ramp_limit_down', spec='extendables') + + +def define_nodal_balance_constraints(n, sns): + + def bus_injection(c, attr, groupcol='bus', sign=1): + #additional sign only necessary for branches in reverse direction + if 'sign' in n.df(c): + sign = sign * n.df(c).sign + vals = linexpr((sign, get_var(n, c, attr)), return_axes=True) + return pd.DataFrame(*vals).rename(columns=n.df(c)[groupcol]) + + # one might reduce this a bit by using n.branches and lookup + args = [['Generator', 'p'], ['Store', 'p'], ['StorageUnit', 'p_dispatch'], + ['StorageUnit', 'p_store', 'bus', -1], ['Line', 's', 'bus0', -1], + ['Line', 's', 'bus1', 1], ['Transformer', 's', 'bus0', -1], + ['Transformer', 's', 'bus1', 1], ['Link', 'p', 'bus0', -1], + ['Link', 'p', 'bus1', n.links.efficiency]] + args = [arg for arg in args if not n.df(arg[0]).empty] + + lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) + .groupby(axis=1, level=0) + .agg(lambda x: ''.join(x.values)) + .reindex(columns=n.buses.index)) + sense = '=' + rhs = ((- n.loads_t.p_set * n.loads.sign) + .groupby(n.loads.bus, axis=1).sum() + .reindex(columns=n.buses.index, fill_value=0)) + constraints = write_constraint(n, lhs, sense, rhs) + set_conref(n, constraints, 'Bus', 'nodal_balance') + + +def define_kirchhoff_constraints(n): + weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff) + + def cycle_flow(ds): + ds = ds[lambda ds: ds!=0.].dropna() + vals = linexpr((ds, get_var(n, 'Line', 's')[ds.index])) + '\n' + return vals.sum(1) + + constraints = [] + for sub in n.sub_networks.obj: + C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) + if C.empty: + continue + C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) + con = write_constraint(n, C_weighted.apply(cycle_flow), '=', 0) + constraints.append(con) + constraints = pd.concat(constraints, axis=1, ignore_index=True) + set_conref(n, constraints, 'Line', 'kirchhoff_voltage') + + +def define_storage_unit_constraints(n, sns): + sus_i = n.storage_units.index + if sus_i.empty: return + c = 'StorageUnit' + #spillage + upper = get_as_dense(n, c, 'inflow').loc[:, lambda df: df.max() > 0] + spill = write_bound(n, 0, upper) + set_varref(n, spill, 'StorageUnit', 'spill') + + #soc constraint previous_soc + p_store - p_dispatch + inflow - spill == soc + eh = expand_series(n.snapshot_weightings, sus_i) #elapsed hours + + eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + eff_dispatch = expand_series(n.df(c).efficiency_dispatch, sns).T + eff_store = expand_series(n.df(c).efficiency_store, sns).T + + soc = get_var(n, c, 'state_of_charge') + cyclic_i = n.df(c).query('cyclic_state_of_charge').index + noncyclic_i = n.df(c).query('~cyclic_state_of_charge').index + + prev_soc_cyclic = soc.shift().fillna(soc.loc[sns[-1]]) + + coeff_var = [(-1, soc), + (-1/eff_dispatch * eh, get_var(n, c, 'p_dispatch')), + (eff_store * eh, get_var(n, c, 'p_store'))] + lhs, *axes = linexpr(*coeff_var, return_axes=True) + + def masked_term(coeff, var, cols): + return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ + .reindex(index=axes[0], columns=axes[1], fill_value='').values + + lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns) + lhs += masked_term(eff_stand, prev_soc_cyclic, cyclic_i) + lhs += masked_term(eff_stand.loc[sns[1:]], soc.shift().loc[sns[1:]], noncyclic_i) + + rhs = -get_as_dense(n, c, 'inflow').mul(eh) + rhs.loc[sns[0], noncyclic_i] -= n.df(c).state_of_charge_initial[noncyclic_i] + + constraints = write_constraint(n, lhs, '==', rhs) + 
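+    # per snapshot t and storage unit, the constraint block above reads:
+    # soc[t] == eff_stand * soc[t-1] + eff_store * eh * p_store[t]
+    #          - eh / eff_dispatch * p_dispatch[t] + eh * inflow[t] - eh * spill[t]
+    # with the initial soc given by the cyclic or initial state of charge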
set_conref(n, constraints, c, 'soc') + + +def define_store_constraints(n, sns): + stores_i = n.stores.index + if stores_i.empty: return + c = 'Store' + variables = write_bound(n, -np.inf, np.inf, axes=[sns, stores_i]) + set_varref(n, variables, c, 'p') + + #previous_e - p == e + eh = expand_series(n.snapshot_weightings, stores_i) #elapsed hours + eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + + e = get_var(n, c, 'e') + cyclic_i = n.df(c).query('e_cyclic').index + noncyclic_i = n.df(c).query('~e_cyclic').index + + previous_e_cyclic = e.shift().fillna(e.loc[sns[-1]]) + + coeff_var = [(-eh, get_var(n, c, 'p')), (-1, e)] + + lhs, *axes = linexpr(*coeff_var, return_axes=True) + + def masked_term(coeff, var, cols): + return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ + .reindex(index=axes[0], columns=axes[1], fill_value='').values + + lhs += masked_term(eff_stand, previous_e_cyclic, cyclic_i) + lhs += masked_term(eff_stand.loc[sns[1:]], e.shift().loc[sns[1:]], noncyclic_i) + + rhs = pd.DataFrame(0, sns, stores_i) + rhs.loc[sns[0], noncyclic_i] -= n.df(c)['e_initial'][noncyclic_i] + + constraints = write_constraint(n, lhs, '==', rhs) + set_conref(n, constraints, c, 'soc') + + +def define_global_constraints(n, sns): + glcs = n.global_constraints.query('type == "primary_energy"') + for name, glc in glcs.iterrows(): + carattr = glc.carrier_attribute + emissions = n.carriers.query(f'{carattr} != 0')[carattr] + if emissions.empty: continue + gens = n.generators.query('carrier in @emissions.index') + em_pu = gens.carrier.map(emissions)/gens.efficiency + em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T + vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) + lhs = join_exprs(vals) + rhs = glc.constant + + #storage units + sus = n.storage_units.query('carrier in @emissions.index and ' + 'not cyclic_state_of_charge') + sus_i = sus.index + if not sus.empty: + vals = linexpr((-sus.carrier.map(emissions), + get_var(n, 'StorageUnit', 'state_of_charge').loc[sns[-1], sus_i])) + lhs = lhs + '\n' + join_exprs(vals) + rhs -= sus.carrier.map(emissions) @ sus.state_of_charge_initial + + #stores + n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) + stores = n.stores.query('carrier in @emissions.index and not e_cyclic') + if not stores.empty: + vals = linexpr((-stores.carrier.map(n.emissions), + get_var(n, 'Store', 'e').loc[sns[-1], stores.index])) + lhs = lhs + '\n' + join_exprs(vals) + rhs -= stores.carrier.map(emissions) @ stores.state_of_charge_initial + + + con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + + #expansion limits + glcs = n.global_constraints.query('type == ' + '"transmission_volume_expansion_limit"') + substr = lambda s: re.sub('[\[\]\(\)]', '', s) + for name, glc in glcs.iterrows(): + carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')] + lines_ext_i = n.lines.query(f'carrier in @carattr ' + 'and s_nom_extendable').index + links_ext_i = n.links.query(f'carrier in @carattr ' + 'and p_nom_extendable').index + linevars = linexpr((n.lines.length[lines_ext_i], + get_var(n, 'Line', 's_nom')[lines_ext_i])) + linkvars = linexpr((n.links.length[links_ext_i], + get_var(n, 'Link', 'p_nom')[links_ext_i])) + lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) + sense = glc.sense + rhs = glc.constant + con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, 
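+                # lhs above is sum(length * s_nom) over extendable lines plus
+                # sum(length * p_nom) over extendable links, constrained
+                # against the global constraint's constant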
name) + + #expansion cost limits + glcs = n.global_constraints.query('type == ' + '"transmission_expansion_cost_limit"') + for name, glc in glcs.iterrows(): + carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')] + lines_ext_i = n.lines.query(f'carrier in @carattr ' + 'and s_nom_extendable').index + links_ext_i = n.links.query(f'carrier in @carattr ' + 'and p_nom_extendable').index + linevars = linexpr((n.lines.capital_cost[lines_ext_i], + get_var(n, 'Line', 's_nom')[lines_ext_i])) + linkvars = linexpr((n.links.capital_cost[links_ext_i], + get_var(n, 'Link', 'p_nom')[links_ext_i])) + lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) + sense = glc.sense + rhs = glc.constant + con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) + set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + + +def define_objective(n): + for c, attr in lookup.query('marginal_cost').index: + cost = (get_as_dense(n, c, 'marginal_cost') + .loc[:, lambda ds: (ds != 0).all()] + .mul(n.snapshot_weightings, axis=0)) + if cost.empty: continue + terms = linexpr((cost, get_var(n, c, attr)[cost.columns])) + for t in terms.flatten(): + n.objective_f.write(t) + #investment + for c, attr in nominals.items(): + cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] + if cost.empty: continue + terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n' + for t in terms.flatten(): + n.objective_f.write(t) + + + +def prepare_lopf(n, snapshots=None, keep_files=False, + extra_functionality=None): + reset_counter() + + #used in kirchhoff and globals + n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) + + cols = ['component', 'name', 'pnl', 'specification'] + n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) + n.constraints = pd.DataFrame(columns=cols).set_index(cols[:2]) + + snapshots = n.snapshots if snapshots is None else snapshots + start = time.time() + def time_info(message): + logger.info(f'{message} {round(time.time()-start, 2)}s') + + n.identifier = ''.join(random.choice(string.ascii_lowercase) + for i in range(8)) + objective_fn = f"/tmp/objective-{n.identifier}.txt" + constraints_fn = f"/tmp/constraints-{n.identifier}.txt" + bounds_fn = f"/tmp/bounds-{n.identifier}.txt" + n.problem_fn = f"/tmp/test-{n.identifier}.lp" + + n.objective_f = open(objective_fn, mode='w') + n.constraints_f = open(constraints_fn, mode='w') + n.bounds_f = open(bounds_fn, mode='w') + + n.objective_f.write('\* LOPF *\n\nmin\nobj:\n') + n.constraints_f.write("\n\ns.t.\n\n") + n.bounds_f.write("\nbounds\n") + + + for c, attr in lookup.query('nominal and not handle_separately').index: + define_nominal_for_extendable_variables(n, c, attr) + define_fixed_variariable_constraints(n, snapshots, c, attr, pnl=False) + for c, attr in lookup.query('not nominal and not handle_separately').index: + define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) + define_dispatch_for_extendable_variables(n, snapshots, c, attr) + define_dispatch_for_extendable_constraints(n, snapshots, c, attr) + define_fixed_variariable_constraints(n, snapshots, c, attr) + + define_ramp_limit_constraints(n, snapshots) + define_storage_unit_constraints(n, snapshots) + define_store_constraints(n, snapshots) + define_kirchhoff_constraints(n) + define_nodal_balance_constraints(n, snapshots) + define_global_constraints(n, snapshots) + define_objective(n) + + if extra_functionality is not None: + extra_functionality(n, snapshots) + + n.objective_f.close() + n.constraints_f.close() + n.bounds_f.write("end\n") + 
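+    # the objective, constraints and bounds buffers are concatenated below
+    # into a single LP file of the form "min obj: ... s.t. ... bounds ... end",
+    # which all supported solvers read directly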
n.bounds_f.close() + + del n.objective_f + del n.constraints_f + del n.bounds_f + + os.system(f"cat {objective_fn} {constraints_fn} {bounds_fn} " + f"> {n.problem_fn}") + + time_info('Total preparation time:') + + if not keep_files: + for fn in [objective_fn, constraints_fn, bounds_fn]: + os.system("rm "+ fn) + + +def assign_solution(n, sns, variables_sol, constraints_dual, + extra_postprocessing, keep_references=False): + pop = not keep_references + #solutions + def map_solution(c, attr, pnl): + if pnl: + variables = get_var(n, c, attr, pop=pop) + if variables.empty: return + values = variables.stack().map(variables_sol).unstack() + if c in n.passive_branch_components: + n.pnl(c)['p0'] = values + n.pnl(c)['p1'] = - values + elif c == 'Link': + n.pnl(c)['p0'] = values + n.pnl(c)['p1'] = - values * n.df(c).efficiency + else: + n.pnl(c)[attr] = values + elif not get_extendable_i(n, c).empty: + n.df(c)[attr+'_opt'] = get_var(n, c, attr, pop=pop)\ + .map(variables_sol).fillna(n.df(c)[attr]) + else: + n.df(c)[attr+'_opt'] = n.df(c)[attr] + + for (c, attr), pnl in n.variables.pnl.items(): + map_solution(c, attr, pnl) + + if not n.df('StorageUnit').empty: + c = 'StorageUnit' + n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store'] + + #duals + def map_dual(c, attr, pnl): + if pnl: + n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() + .map(-constraints_dual).unstack()) + else: + n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(-constraints_dual) + + for (c, attr), pnl in n.constraints.pnl.items(): + map_dual(c, attr, pnl) + + #load + n.loads_t.p = n.loads_t.p_set + + #injection, why does it include injection in hvdc 'network' + ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), + ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), + ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')] + sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1 #sign for 'Link' + n.buses_t.p = pd.concat( + [n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group]) + for c, attr, group in ca], axis=1).groupby(level=0, axis=1).sum() + + def v_ang_for_(sub): + buses_i = sub.buses_o + if len(buses_i) == 1: return + sub.calculate_B_H(skip_pre=True) + if len(sub.buses_i()) == 1: return + Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i) + Z -= Z[sub.slack_bus] + return n.buses_t.p[buses_i] @ Z + n.buses_t.v_ang = (pd.concat( + [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) + .reindex(columns=n.buses.index, fill_value=0)) + + + + +def network_lopf(n, snapshots=None, solver_name="cbc", + solver_logfile=None, extra_functionality=None, + extra_postprocessing=None, formulation="kirchhoff", + keep_references=False, keep_files=False, solver_options={}, + warmstart=False, store_basis=True): + """ + Linear optimal power flow for a group of snapshots. + + Parameters + ---------- + snapshots : list or index slice + A list of snapshots to optimise, must be a subset of + network.snapshots, defaults to network.snapshots + solver_name : string + Must be a solver name that pyomo recognises and that is + installed, e.g. "glpk", "gurobi" + skip_pre : bool, default False + Skip the preliminary steps of computing topology, calculating + dependent values and finding bus controls. + extra_functionality : callable function + This function must take two arguments + `extra_functionality(network,snapshots)` and is called after + the model building is complete, but before it is sent to the + solver. It allows the user to + add/change constraints and add/change the objective function. 
+    solver_logfile : None|string
+        If not None, sets the logfile option of the solver.
+    solver_options : dictionary
+        A dictionary with additional options that get passed to the solver.
+        (e.g. {'threads':2} tells gurobi to use only 2 cpus)
+    keep_files : bool, default False
+        Keep the intermediate files that are constructed for the problem,
+        e.g. the .lp file - useful for debugging
+    formulation : string
+        Formulation of the linear power flow equations to use; only "kirchhoff"
+        is currently supported
+    extra_postprocessing : callable function
+        This function must take three arguments
+        `extra_postprocessing(network,snapshots,duals)` and is called after
+        the model has solved and the results are extracted. It allows the user
+        to extract further information about the solution, such as additional
+        shadow prices.
+
+    Returns
+    -------
+    None
+    """
+    supported_solvers = ["cbc", "gurobi", 'glpk']
+    if solver_name not in supported_solvers:
+        raise NotImplementedError(f"Solver {solver_name} not in "
+                                  f"supported solvers: {supported_solvers}")
+
+    if formulation != "kirchhoff":
+        raise NotImplementedError("Only the kirchhoff formulation is supported")
+
+    snapshots = _as_snapshots(n, snapshots)
+    n.calculate_dependent_values()
+    n.determine_network_topology()
+
+    if solver_logfile is None:
+        solver_logfile = "test.log"
+
+    logger.info("Prepare linear problem")
+    prepare_lopf(n, snapshots, keep_files, extra_functionality)
+    gc.collect()
+    solution_fn = "/tmp/test-{}.sol".format(n.identifier)
+
+    if warmstart == True:
+        warmstart = n.basis_fn
+        logger.info("Solve linear problem using warmstart")
+    else:
+        logger.info("Solve linear problem")
+
+    solve = eval(f'run_and_read_{solver_name}')
+    res = solve(n, n.problem_fn, solution_fn, solver_logfile,
+                solver_options, keep_files, warmstart, store_basis)
+    status, termination_condition, variables_sol, constraints_dual, obj = res
+    del n.problem_fn
+
+    if termination_condition != "optimal":
+        return status,termination_condition
+
+    #adjust objective value
+    for c, attr in nominals.items():
+        obj -= n.df(c)[attr] @ n.df(c).capital_cost
+    n.objective = obj
+    gc.collect()
+    assign_solution(n, snapshots, variables_sol, constraints_dual,
+                    extra_postprocessing, keep_references=keep_references)
+    gc.collect()
+
+    return status,termination_condition
+
+
+def ilopf(n, snapshots=None, msq_threshold=0.05, min_iterations=1,
+          max_iterations=100, **kwargs):
+    '''
+    Iterative linear optimization updating the line parameters for passive
+    AC and DC lines. This is helpful when line expansion is enabled. After
+    each successful solve, line impedances and line resistances are
+    recalculated based on the optimization result. If warmstart is possible,
+    it uses the result from the previous iteration to speed up the
+    optimization.
+
+    Parameters
+    ----------
+    snapshots : list or index slice
+        A list of snapshots to optimise, must be a subset of
+        network.snapshots, defaults to network.snapshots
+    msq_threshold: float, default 0.05
+        Maximal mean square difference between optimized line capacity of
+        the current and the previous iteration.
+    """
+    supported_solvers = ["cbc", "gurobi", "glpk"]
+    if solver_name not in supported_solvers:
+        raise NotImplementedError(f"Solver {solver_name} not in "
+                                  f"supported solvers: {supported_solvers}")
+
+    if formulation != "kirchhoff":
+        raise NotImplementedError("Only the kirchhoff formulation is supported")
+
+    #disable logging because multiple slack bus calculations, keep output clean
+    snapshots = _as_snapshots(n, snapshots)
+    n.calculate_dependent_values()
+    n.determine_network_topology()
+
+    if solver_logfile is None:
+        solver_logfile = "test.log"
+
+    logger.info("Prepare linear problem")
+    prepare_lopf(n, snapshots, keep_files, extra_functionality)
+    gc.collect()
+    solution_fn = "/tmp/test-{}.sol".format(n.identifier)
+
+    if warmstart is True:
+        warmstart = n.basis_fn
+        logger.info("Solve linear problem using warmstart")
+    else:
+        logger.info("Solve linear problem")
+
+    solve = eval(f'run_and_read_{solver_name}')
+    res = solve(n, n.problem_fn, solution_fn, solver_logfile,
+                solver_options, keep_files, warmstart, store_basis)
+    status, termination_condition, variables_sol, constraints_dual, obj = res
+    del n.problem_fn
+
+    if termination_condition != "optimal":
+        return status,termination_condition
+
+    #adjust objective value
+    for c, attr in nominals.items():
+        obj -= n.df(c)[attr] @ n.df(c).capital_cost
+    n.objective = obj
+    gc.collect()
+    assign_solution(n, snapshots, variables_sol, constraints_dual,
+                    extra_postprocessing, keep_references=keep_references)
+    gc.collect()
+
+    return status,termination_condition
+
+
+def ilopf(n, snapshots=None, msq_threshold=0.05, min_iterations=1,
+          max_iterations=100, **kwargs):
+    '''
+    Iterative linear optimization updating the line parameters for passive
+    AC and DC lines. This is helpful when line expansion is enabled. After
+    each successful solve, line impedances and resistances are recalculated
+    based on the optimization result. If a warmstart is possible, the result
+    from the previous iteration is used to speed up the optimization.
+
+    Parameters
+    ----------
+    snapshots : list or index slice
+        A list of snapshots to optimise, must be a subset of
+        network.snapshots, defaults to network.snapshots
+    msq_threshold : float, default 0.05
+        Maximal mean square difference between the optimized line capacities
+        of the current and the previous iteration. As soon as this threshold
+        is undercut and the number of iterations exceeds 'min_iterations',
+        the iterative optimization stops.
+    min_iterations : integer, default 1
+        Minimal number of iterations to run regardless of whether the
+        msq_threshold is already undercut
+    max_iterations : integer, default 100
+        Maximal number of iterations to run regardless of whether the
+        msq_threshold is already undercut
+    **kwargs
+        Keyword arguments of the lopf function which runs at each iteration
+
+    '''
+
+    ext_i = get_extendable_i(n, 'Line')
+    typed_i = n.lines.query('type != ""').index
+    ext_untyped_i = ext_i.difference(typed_i)
+    ext_typed_i = ext_i & typed_i
+    base_s_nom = (np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
+                  n.lines.bus0.map(n.buses.v_nom))
+    n.lines.loc[ext_typed_i, 'num_parallel'] = (n.lines.s_nom/base_s_nom)[ext_typed_i]
+
+    def update_line_params(n, s_nom_prev):
+        factor = n.lines.s_nom_opt / s_nom_prev
+        for attr, carrier in (('x', 'AC'), ('r', 'DC')):
+            ln_i = (n.lines.query('carrier == @carrier').index & ext_untyped_i)
+            n.lines.loc[ln_i, attr] /= factor[ln_i]
+        ln_i = ext_i & typed_i
+        n.lines.loc[ln_i, 'num_parallel'] = (n.lines.s_nom_opt/base_s_nom)[ln_i]
+
+    def msq_diff(n, s_nom_prev):
+        lines_err = np.sqrt((s_nom_prev - n.lines.s_nom_opt).pow(2).mean()) / \
+                    n.lines['s_nom_opt'].mean()
+        logger.info(f"Mean square difference after iteration {iteration} is "
+                    f"{lines_err}")
+        return lines_err
+
+    iteration = 0
+    diff = msq_threshold
+    while diff >= msq_threshold or iteration < min_iterations:
+        if iteration >= max_iterations:
+            logger.info(f'Iteration {iteration} beyond max_iterations '
+                        f'{max_iterations}. Stopping ...')
+            break
+
+        s_nom_prev = n.lines.s_nom_opt if iteration else n.lines.s_nom
+        kwargs['warmstart'] = bool(iteration and hasattr(n, 'basis_fn'))
+        network_lopf(n, snapshots, **kwargs)
+        update_line_params(n, s_nom_prev)
+        diff = msq_diff(n, s_nom_prev)
+        iteration += 1
+
+
+
+# =============================================================================
+# test/double-check constraints
+# =============================================================================
+
+
+def describe_storage_unit_contraints(n):
+    """
+    Checks whether all storage units are balanced over time. This function
+    requires the network to contain the separate variables p_store and
+    p_dispatch, since they cannot be reconstructed from p. The latter results
+    from times tau where p_store(tau) > 0 **and** p_dispatch(tau) > 0, which
+    is allowed (even though not economic). Therefore p_store is necessarily
+    equal to the negative entries of p, and vice versa for p_dispatch.
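+
+    Example
+    -------
+    A rough sketch for an already solved network (the network ``n`` is
+    hypothetical; the exact statistics depend on the solution):
+
+    >>> describe_storage_unit_contraints(n)  # doctest: +SKIP
+
+    This returns a frame with a 'Spillage Limit' column (whose minimum
+    should not be negative) and the describe() statistics of the
+    reconstructed SOC balance residual, which should be close to zero for a
+    consistent solution.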
+    """
+    sus = n.storage_units
+    sus_i = sus.index
+    if sus_i.empty: return
+    sns = n.snapshots
+    c = 'StorageUnit'
+    pnl = n.pnl(c)
+
+    description = {}
+
+    eh = expand_series(n.snapshot_weightings, sus_i)
+    stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
+    dispatch_eff = expand_series(n.df(c).efficiency_dispatch, sns).T
+    store_eff = expand_series(n.df(c).efficiency_store, sns).T
+    inflow = get_as_dense(n, c, 'inflow') * eh
+    spill = eh[pnl.spill.columns] * pnl.spill
+
+    description['Spillage Limit'] = pd.Series({'min':
+                    (inflow[spill.columns] - spill).min().min()})
+
+    if 'p_store' in pnl:
+        soc = pnl.state_of_charge
+
+        store = store_eff * eh * pnl.p_store
+        dispatch = 1/dispatch_eff * eh * pnl.p_dispatch
+        start = soc.iloc[-1].where(sus.cyclic_state_of_charge,
+                                   sus.state_of_charge_initial)
+        previous_soc = stand_eff * soc.shift().fillna(start)
+
+
+        reconstructed = (previous_soc.add(store, fill_value=0)
+                         .add(inflow, fill_value=0)
+                         .add(-dispatch, fill_value=0)
+                         .add(-spill, fill_value=0))
+        description['SOC Balance StorageUnit'] = ((reconstructed - soc)
+                                                  .unstack().describe())
+    else:
+        logger.info('Storage Unit SOC balance not reconstructable as no '
+                    'p_store and p_dispatch in n.storage_units_t.')
+    return pd.concat(description, axis=1, sort=False)
+
+
+def describe_nodal_balance_constraint(n):
+    """
+    Helper function to double-check whether the network flow is balanced.
+    """
+    network_injection = pd.concat(
+            [n.pnl(c)[f'p{inout}'].rename(columns=n.df(c)[f'bus{inout}'])
+             for inout in (0, 1) for c in ('Line', 'Transformer')], axis=1)\
+            .groupby(level=0, axis=1).sum()
+    return (n.buses_t.p - network_injection).unstack().describe()\
+            .to_frame('Nodal Balance Constr.')
+
+def describe_upper_dispatch_constraints(n):
+    '''
+    Recalculates the minimum gap between the dispatch and the upper dispatch
+    limit (nominal capacity times the maximal per unit availability).
+    '''
+    description = {}
+    key = ' Upper Limit'
+    for c, attr in nominals.items():
+        dispatch_attr = 'p0' if c in ['Line', 'Transformer', 'Link'] else attr[0]
+        description[c + key] = pd.Series({'min':
+                               (n.df(c)[attr + '_opt'] *
+                                get_as_dense(n, c, attr[0] + '_max_pu') -
+                                n.pnl(c)[dispatch_attr]).min().min()})
+    return pd.concat(description, axis=1)
+
+
+def describe_lower_dispatch_constraints(n):
+    '''
+    Recalculates the minimum gap between the dispatch and the lower dispatch
+    limit.
+    '''
+    description = {}
+    key = ' Lower Limit'
+    for c, attr in nominals.items():
+        if c in ['Line', 'Transformer', 'Link']:
+            dispatch_attr = 'p0'
+            description[c + key] = pd.Series({'min':
+                                   (n.df(c)[attr + '_opt'] *
+                                    get_as_dense(n, c, attr[0] + '_max_pu') +
+                                    n.pnl(c)[dispatch_attr]).min().min()})
+        else:
+            dispatch_attr = attr[0]
+            description[c + key] = pd.Series({'min':
+                                   (-n.df(c)[attr + '_opt'] *
+                                    get_as_dense(n, c, attr[0] + '_min_pu') +
+                                    n.pnl(c)[dispatch_attr]).min().min()})
+    return pd.concat(description, axis=1)
+
+
+def describe_store_contraints(n):
+    """
+    Checks whether all stores are balanced over time.
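+
+    Example
+    -------
+    Analogous to the storage unit check above, a rough sketch (hypothetical
+    solved network ``n``):
+
+    >>> describe_store_contraints(n)  # doctest: +SKIP
+
+    The returned 'SOC Balance Store' column holds the describe() statistics
+    of the residual previous_e - p - e, which should be close to zero
+    everywhere for a consistent solution.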
+    """
+    stores = n.stores
+    stores_i = stores.index
+    if stores_i.empty: return
+    sns = n.snapshots
+    c = 'Store'
+    pnl = n.pnl(c)
+
+    eh = expand_series(n.snapshot_weightings, stores_i)
+    stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
+
+    start = pnl.e.iloc[-1].where(stores.e_cyclic, stores.e_initial)
+    previous_e = stand_eff * pnl.e.shift().fillna(start)
+
+    return (previous_e - pnl.p - pnl.e).unstack().describe()\
+            .to_frame('SOC Balance Store')
+
+
+def describe_cycle_constraints(n):
+    """
+    Checks whether the Kirchhoff voltage law is fulfilled, i.e. whether the
+    weighted flows around each cycle sum up to zero.
+    """
+    weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff)
+
+    def cycle_flow(sub):
+        C = pd.DataFrame(sub.C.todense(), index=sub.lines_i())
+        if C.empty:
+            return None
+        C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0)
+        return C_weighted.apply(lambda ds: ds @ n.lines_t.p0[ds.index].T)
+
+    return pd.concat([cycle_flow(sub) for sub in n.sub_networks.obj], axis=0)\
+             .unstack().describe().to_frame('Cycle Constr.')
+
+
+
+def constraint_stats(n, round_digit=1e-30):
+    """
+    Post-optimization function to recalculate gap statistics of the
+    different constraints. For inequality constraints, only the minimum of
+    lhs - rhs (with lhs >= rhs) is returned.
+    """
+    return pd.concat([describe_cycle_constraints(n),
+                      describe_store_contraints(n),
+                      describe_storage_unit_contraints(n),
+                      describe_nodal_balance_constraint(n),
+                      describe_lower_dispatch_constraints(n),
+                      describe_upper_dispatch_constraints(n)],
+                     axis=1, sort=False)
+
+def check_constraints(n, tol=1e-3):
+    """
+    Post-optimization test function to double-check most of the lopf
+    constraints. For the relevant equality constraints, it tests whether the
+    deviation between lhs and rhs is below the given tolerance. For
+    inequality constraints, it tests whether the inequality is violated by
+    more than the tolerance.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    tol : float
+        Gap tolerance
+
+    Raises an AssertionError if the tolerance is exceeded.
+
+    """
+    stats = constraint_stats(n).rename(index=str.title)
+    condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T
+    assert condition.empty, (f'The following constraint(s) are exceeding the '
+                             f'given tolerance of {tol}: \n{condition}')
+
+
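+
+# A rough usage sketch (illustrative only, not part of the original
+# nomopyomo code): after a successful `network_lopf(n)` run, the solution
+# can be double-checked with
+#
+#     check_constraints(n, tol=1e-3)
+#
+# which raises an AssertionError listing every constraint group whose
+# residual exceeds the given tolerance.
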
From 9409c783fd134b6a2881156899aa7d9295cf9b29 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Thu, 17 Oct 2019 12:13:53 +0200
Subject: [PATCH 013/111] distribute nomopyomo code over modules

---
 pypsa/__init__.py                     |    3 +-
 pypsa/components.py                   |    8 +-
 pypsa/descriptors.py                  |   76 +-
 pypsa/{opf_lowmemory.py => linopf.py} |   33 +-
 pypsa/{opt_lowmemory.py => linopt.py} |  253 +----
 pypsa/opf.py                          |    8 +-
 pypsa/opt.py                          |   12 +
 pypsa/solve.py                        | 1323 -------------------------
 pypsa/stats.py                        |  187 ++++
 test/test_ac_dc_lopf.py               |   16 +-
 test/test_opf_storage.py              |   16 +-
 11 files changed, 324 insertions(+), 1611 deletions(-)
 delete mode 100644 pypsa/solve.py
 create mode 100644 pypsa/stats.py

diff --git a/pypsa/__init__.py b/pypsa/__init__.py
index b44764a68..8cfc9fc74 100644
--- a/pypsa/__init__.py
+++ b/pypsa/__init__.py
@@ -26,7 +26,8 @@
 from __future__ import absolute_import

 from . import components, descriptors
-from . import (pf, opf, opt, plot, networkclustering, io, contingency, geo, solve)
+from . import (pf, opf, opt, plot, networkclustering, io, contingency, geo,
+               linopf, linopt, stats)

 from .components import Network, SubNetwork

diff --git a/pypsa/components.py b/pypsa/components.py
index fbb53249f..adb46bc0e 100644
--- a/pypsa/components.py
+++ b/pypsa/components.py
@@ -21,24 +21,18 @@
 # make the code as Python 3 compatible as possible
 from __future__ import division, absolute_import

-import six
 from six import iteritems, itervalues, iterkeys
-from six.moves import map

 from weakref import ref

 __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)"
 __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS), GNU GPL 3"

-import networkx as nx
 import numpy as np
 import pandas as pd
-import scipy as sp, scipy.sparse
 from scipy.sparse import csgraph
-from itertools import chain
 from collections import namedtuple
-from operator import itemgetter

 import os

@@ -67,7 +61,7 @@

 from .opf import network_lopf, network_opf

-from .solve import network_lopf as network_lopf_lowmem
+from .linopf import network_lopf as network_lopf_lowmem

 from .plot import plot, iplot

diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py
index fe7dc9cc4..6c3fb1b2b 100644
--- a/pypsa/descriptors.py
+++ b/pypsa/descriptors.py
@@ -22,19 +22,15 @@
 # make the code as Python 3 compatible as possible
 from __future__ import division
 from __future__ import absolute_import
-from six import iteritems, string_types
+from six import iteritems

 __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS)"
 __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), GNU GPL 3"

-
-
-
 #weak references are necessary to make sure the key-value pair are
 #destroyed if the key object goes out of scope

-from weakref import WeakKeyDictionary

 from collections import OrderedDict
 from itertools import repeat

@@ -303,3 +299,73 @@ def zsum(s, *args, **kwargs):
     Meant to be set as pd.Series.zsum = zsum.
     """
     return 0 if s.empty else s.sum(*args, **kwargs)
+
+#Perhaps this should rather go into components.py
+nominal_attrs = {'Generator': 'p_nom',
+                 'Line': 's_nom',
+                 'Transformer': 's_nom',
+                 'Link': 'p_nom',
+                 'Store': 'e_nom',
+                 'StorageUnit': 'p_nom'}
+
+def expand_series(ser, columns):
+    """
+    Helper function to quickly expand a series to a dataframe with the
+    given column axis, where every single column equals the given series.
+    """
+    return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1)
+
+
+def get_extendable_i(n, c):
+    """
+    Getter function. Get the index of extendable elements of a given component.
+    """
+    return n.df(c)[lambda ds:
+        ds[nominal_attrs[c] + '_extendable']].index
+
+def get_non_extendable_i(n, c):
+    """
+    Getter function. Get the index of non-extendable elements of a given
+    component.
+    """
+    return n.df(c)[lambda ds:
+        ~ds[nominal_attrs[c] + '_extendable']].index
+
+def get_bounds_pu(n, c, sns, index=slice(None), attr=None):
+    """
+    Getter function to retrieve the per unit bounds of a given component for
+    given snapshots and a possible subset of elements (e.g. non-extendables).
+    Depending on the attr you can further specify the bounds of the variable
+    you are looking at, e.g. p_store for storage units.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    c : string
+        Component name, e.g. "Generator", "Line".
+    sns : pandas.Index/pandas.DateTimeIndex
+        set of snapshots for the bounds
+    index : pd.Index, default slice(None)
+        Subset of the component elements. By default the bounds of all
+        elements are returned.
+    attr : string, default None
+        attribute name for the bounds, e.g. "p", "s", "p_store"
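+
+    Example
+    -------
+    A small sketch of the intended usage (hypothetical network ``n``):
+
+    >>> min_pu, max_pu = get_bounds_pu(n, 'Generator', n.snapshots)  # doctest: +SKIP
+
+    Both frames are indexed by the given snapshots with one column per
+    generator, so they can be multiplied elementwise with the nominal
+    capacities.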
+
+    """
+    min_pu_str = nominal_attrs[c].replace('nom', 'min_pu')
+    max_pu_str = nominal_attrs[c].replace('nom', 'max_pu')
+
+    max_pu = get_switchable_as_dense(n, c, max_pu_str, sns)
+    if c in n.passive_branch_components:
+        min_pu = - max_pu
+    elif c == 'StorageUnit':
+        min_pu = pd.DataFrame(0, max_pu.index, max_pu.columns)
+        if attr == 'p_store':
+            max_pu = - get_switchable_as_dense(n, c, min_pu_str, sns)
+        if attr == 'state_of_charge':
+            max_pu = expand_series(n.df(c).max_hours, sns).T
+            min_pu = pd.DataFrame(0, *max_pu.axes)
+    else:
+        min_pu = get_switchable_as_dense(n, c, min_pu_str, sns)
+    return min_pu[index], max_pu[index]
+
diff --git a/pypsa/opf_lowmemory.py b/pypsa/linopf.py
similarity index 96%
rename from pypsa/opf_lowmemory.py
rename to pypsa/linopf.py
index 7f53e2d0a..30c1acfcd 100644
--- a/pypsa/opf_lowmemory.py
+++ b/pypsa/linopf.py
@@ -13,18 +13,20 @@
 ## You should have received a copy of the GNU General Public License
 ## along with this program.  If not, see <http://www.gnu.org/licenses/>.

-"""nomopyomo: build optimisation problems from PyPSA networks without
-Pyomo. nomopyomo = no more Pyomo."""
+"""
+Build optimisation problems from PyPSA networks without Pyomo.
+Originally retrieved from nomopyomo (-> 'no more Pyomo').
+"""

-from .opt_lowmemory import (get_bounds_pu, get_extendable_i, linexpr,
-                            get_non_extendable_i, write_bound, write_constraint,
-                            set_conref, set_varref, get_con, get_var, lookup,
-                            nominals, reset_counter, expand_series, join_exprs)

-from .pf import (find_cycles as find_cycles, _as_snapshots,
-                 get_switchable_as_dense as get_as_dense)
+from .pf import (_as_snapshots, get_switchable_as_dense as get_as_dense)
+from .descriptors import (get_bounds_pu, get_extendable_i, get_non_extendable_i,
+                          expand_series, nominal_attrs)
+
+from .linopt import (linexpr, write_bound, write_constraint, set_conref,
+                     set_varref, get_con, get_var, reset_counter, join_exprs,
+                     run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk)

-from . import opt_lowmemory

 import pandas as pd
 import numpy as np

@@ -34,6 +36,9 @@
 import logging
 logger = logging.getLogger(__name__)

+lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'),
+                     index_col=['component', 'variable'])
+
 def define_nominal_for_extendable_variables(n, c, attr):
     ext_i = get_extendable_i(n, c)
     if ext_i.empty: return
@@ -53,7 +58,7 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr):
 def define_dispatch_for_non_extendable_variables(n, sns, c, attr):
     fix_i = get_non_extendable_i(n, c)
     if fix_i.empty: return
-    nominal_fix = n.df(c)[nominals.at[c]][fix_i]
+    nominal_fix = n.df(c)[nominal_attrs[c]][fix_i]
     min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr)
     lower = min_pu.mul(nominal_fix)
     upper = max_pu.mul(nominal_fix)
@@ -66,7 +71,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr):
     if ext_i.empty: return
     min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr)
     operational_ext_v = get_var(n, c, attr)[ext_i]
-    nominal_v = get_var(n, c, nominals.at[c])[ext_i]
+    nominal_v = get_var(n, c, nominal_attrs[c])[ext_i]
     rhs = 0

     lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v),
@@ -352,7 +357,7 @@ def define_objective(n):
         for t in terms.flatten():
             n.objective_f.write(t)
     #investment
-    for c, attr in nominals.items():
+    for c, attr in nominal_attrs.items():
         cost = n.df(c)['capital_cost'][get_extendable_i(n, c)]
         if cost.empty: continue
         terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n'
@@ -574,7 +579,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
     else:
         logger.info("Solve linear problem")

-    solve = getattr(opt_lowmemory, f'run_and_read_{solver_name}')
+    solve = eval(f'run_and_read_{solver_name}')
     res = solve(n, n.problem_fn, solution_fn, solver_logfile,
                 solver_options, keep_files, warmstart, store_basis)
     status, termination_condition, variables_sol, constraints_dual, obj = res
@@ -584,7 +589,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
         return status,termination_condition

     #adjust objective value
-    for c, attr in nominals.items():
+    for c, attr in nominal_attrs.items():
         obj -= n.df(c)[attr] @ n.df(c).capital_cost
     n.objective = obj
     gc.collect()
diff --git a/pypsa/opt_lowmemory.py b/pypsa/linopt.py
similarity index 61%
rename from pypsa/opt_lowmemory.py
rename to pypsa/linopt.py
index fad06de90..62b63bc5b 100644
--- a/pypsa/opt_lowmemory.py
+++ b/pypsa/linopt.py
@@ -1,22 +1,24 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 """
-Created on Sat Sep  7 17:38:10 2019
+Tools for fast Linear Problem file writing. This module contains

-@author: fabian
+- io functions for writing out variables, constraints and objective
+  into a lp file.
+- functions to create lp format based linear expressions
+- solver functions which read the lp file, run the problem and return the
+  solution
+
+This module supports the linear optimal power flow calculation without using
+pyomo (see module linopf.py)
+"""

 import pandas as pd
 import os, logging, re, io, subprocess
 import numpy as np
-from .descriptors import get_switchable_as_dense as get_as_dense
 from pandas import IndexSlice as idx

-lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'),
-                     index_col=['component', 'variable'])
-nominals = lookup.query('nominal').reset_index(level='variable').variable
-
 # =============================================================================
 # writing functions
 # =============================================================================
@@ -174,69 +176,6 @@ def join_exprs(df):
     """
     return ''.join(np.asarray(df).flatten())

-def expand_series(ser, columns):
-    """
-    Helper function to fastly expand a series to a dataframe with according
-    column axis and every single column being the equal to the given series.
-    """
-    return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1)
-
-# =============================================================================
-# 'getter' functions
-# =============================================================================
-def get_extendable_i(n, c):
-    """
-    Getter function. Get the index of extendable elements of a given component.
-    """
-    return n.df(c)[lambda ds:
-        ds[nominals[c] + '_extendable']].index
-
-def get_non_extendable_i(n, c):
-    """
-    Getter function. Get the index of non-extendable elements of a given
-    component.
-    """
-    return n.df(c)[lambda ds:
-        ~ds[nominals[c] + '_extendable']].index
-
-def get_bounds_pu(n, c, sns, index=slice(None), attr=None):
-    """
-    Getter function to retrieve the per unit bounds of a given compoent for
-    given snapshots and possible subset of elements (e.g. non-extendables).
-    Depending on the attr you can further specify the bounds of the variable
-    you are looking at, e.g. p_store for storage units.
-
-    Parameters
-    ----------
-    n : pypsa.Network
-    c : string
-        Component name, e.g. "Generator", "Line".
-    sns : pandas.Index/pandas.DateTimeIndex
-        set of snapshots for the bounds
-    index : pd.Index, default None
-        Subset of the component elements. If None (default) bounds of all
-        elements are returned.
-    attr : string, default None
-        attribute name for the bounds, e.g.
"p", "s", "p_store" - - """ - min_pu_str = nominals[c].replace('nom', 'min_pu') - max_pu_str = nominals[c].replace('nom', 'max_pu') - - max_pu = get_as_dense(n, c, max_pu_str, sns) - if c in n.passive_branch_components: - min_pu = - max_pu - elif c == 'StorageUnit': - min_pu = pd.DataFrame(0, max_pu.index, max_pu.columns) - if attr == 'p_store': - max_pu = - get_as_dense(n, c, min_pu_str, sns) - if attr == 'state_of_charge': - max_pu = expand_series(n.df(c).max_hours, sns).T - min_pu = pd.DataFrame(0, *max_pu.axes) - else: - min_pu = get_as_dense(n, c, min_pu_str, sns) - return min_pu[index], max_pu[index] - # ============================================================================= # references to vars and cons, rewrite this part to not store every reference @@ -491,177 +430,3 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, return (status, termination_condition, variables_sol, constraints_dual, objective) - -# ============================================================================= -# test/double-check constraints -# ============================================================================= - - -def describe_storage_unit_contraints(n): - """ - Checks whether all storage units are balanced over time. This function - requires the network to contain the separate variables p_store and - p_dispatch, since they cannot be reconstructed from p. The latter results - from times tau where p_store(tau) > 0 **and** p_dispatch(tau) > 0, which - is allowed (even though not economic). Therefor p_store is necessarily - equal to negative entries of p, vice versa for p_dispatch. - """ - sus = n.storage_units - sus_i = sus.index - if sus_i.empty: return - sns = n.snapshots - c = 'StorageUnit' - pnl = n.pnl(c) - - description = {} - - eh = expand_series(n.snapshot_weightings, sus_i) - stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) - dispatch_eff = expand_series(n.df(c).efficiency_dispatch, sns).T - store_eff = expand_series(n.df(c).efficiency_store, sns).T - inflow = get_as_dense(n, c, 'inflow') * eh - spill = eh[pnl.spill.columns] * pnl.spill - - description['Spillage Limit'] = pd.Series({'min': - (inflow[spill.columns] - spill).min().min()}) - - if 'p_store' in pnl: - soc = pnl.state_of_charge - - store = store_eff * eh * pnl.p_store#.clip(upper=0) - dispatch = 1/dispatch_eff * eh * pnl.p_dispatch#(lower=0) - start = soc.iloc[-1].where(sus.cyclic_state_of_charge, - sus.state_of_charge_initial) - previous_soc = stand_eff * soc.shift().fillna(start) - - - reconstructed = (previous_soc.add(store, fill_value=0) - .add(inflow, fill_value=0) - .add(-dispatch, fill_value=0) - .add(-spill, fill_value=0)) - description['SOC Balance StorageUnit'] = ((reconstructed - soc) - .unstack().describe()) - else: - logging.info('Storage Unit SOC balance not reconstructable as no ' - 'p_store and p_dispatch in n.storage_units_t.') - return pd.concat(description, axis=1, sort=False) - - -def describe_nodal_balance_constraint(n): - """ - Helper function to double check whether network flow is balanced - """ - network_injection = pd.concat( - [n.pnl(c)[f'p{inout}'].rename(columns=n.df(c)[f'bus{inout}']) - for inout in (0, 1) for c in ('Line', 'Transformer')], axis=1)\ - .groupby(level=0, axis=1).sum() - return (n.buses_t.p - network_injection).unstack().describe()\ - .to_frame('Nodal Balance Constr.') - -def describe_upper_dispatch_constraints(n): - ''' - Recalculates the minimum gap between operational status and nominal capacity - ''' - description = {} - key = ' Upper Limit' 
- for c, attr in nominals.items(): - dispatch_attr = 'p0' if c in ['Line', 'Transformer', 'Link'] else attr[0] - description[c + key] = pd.Series({'min': - (n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_max_pu') - - n.pnl(c)[dispatch_attr]).min().min()}) - return pd.concat(description, axis=1) - - -def describe_lower_dispatch_constraints(n): - description = {} - key = ' Lower Limit' - for c, attr in nominals.items(): - if c in ['Line', 'Transformer', 'Link']: - dispatch_attr = 'p0' - description[c] = pd.Series({'min': - (n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_max_pu') + - n.pnl(c)[dispatch_attr]).min().min()}) - else: - dispatch_attr = attr[0] - description[c + key] = pd.Series({'min': - (-n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_min_pu') + - n.pnl(c)[dispatch_attr]).min().min()}) - return pd.concat(description, axis=1) - - -def describe_store_contraints(n): - """ - Checks whether all stores are balanced over time. - """ - stores = n.stores - stores_i = stores.index - if stores_i.empty: return - sns = n.snapshots - c = 'Store' - pnl = n.pnl(c) - - eh = expand_series(n.snapshot_weightings, stores_i) - stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) - - start = pnl.e.iloc[-1].where(stores.e_cyclic, stores.e_initial) - previous_e = stand_eff * pnl.e.shift().fillna(start) - - return (previous_e - pnl.p - pnl.e).unstack().describe()\ - .to_frame('SOC Balance Store') - - -def describe_cycle_constraints(n): - weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff) - - def cycle_flow(sub): - C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) - if C.empty: - return None - C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) - return C_weighted.apply(lambda ds: ds @ n.lines_t.p0[ds.index].T) - - return pd.concat([cycle_flow(sub) for sub in n.sub_networks.obj], axis=0)\ - .unstack().describe().to_frame('Cycle Constr.') - - - -def constraint_stats(n, round_digit=1e-30): - """ - Post-optimization function to recalculate gap statistics of different - constraints. For inequality constraints only the minimum of lhs - rhs, with - lhs >= rhs is returned. - """ - return pd.concat([describe_cycle_constraints(n), - describe_store_contraints(n), - describe_storage_unit_contraints(n), - describe_nodal_balance_constraint(n), - describe_lower_dispatch_constraints(n), - describe_upper_dispatch_constraints(n)], - axis=1, sort=False) - -def check_constraints(n, tol=1e-3): - """ - Post-optimization test function to double-check most of the lopf - constraints. For relevant equaility constraints, it test whether the - deviation between lhs and rhs is below the given tolerance. For inequality - constraints, it test whether the inequality is violated with a higher - value then the tolerance. - - Parameters - ---------- - n : pypsa.Network - tol : float - Gap tolerance - - Returns AssertionError if tolerance is exceeded. 
- - """ - stats = constraint_stats(n).rename(index=str.title) - condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T - assert condition.empty, (f'The following constraint(s) are exceeding the ' - f'given tolerance of {tol}: \n{condition}') - - diff --git a/pypsa/opf.py b/pypsa/opf.py index 9c1f4bccd..140dabd4e 100644 --- a/pypsa/opf.py +++ b/pypsa/opf.py @@ -31,9 +31,8 @@ import numpy as np import pandas as pd from scipy.sparse.linalg import spsolve -from pyomo.environ import (ConcreteModel, Var, Objective, - NonNegativeReals, Constraint, Reals, - Suffix, Expression, Binary, SolverFactory) +from pyomo.environ import (ConcreteModel, Var, NonNegativeReals, Constraint, + Reals, Suffix, Binary, SolverFactory) try: from pyomo.solvers.plugins.solvers.persistent_solver import PersistentSolver @@ -41,8 +40,6 @@ # Only used in conjunction with isinstance, so we mock it to be backwards compatible class PersistentSolver(): pass -from itertools import chain - import logging logger = logging.getLogger(__name__) @@ -57,7 +54,6 @@ class PersistentSolver(): pass find_bus_controls, calculate_B_H, calculate_PTDF, find_tree, find_cycles, _as_snapshots) from .opt import (l_constraint, l_objective, LExpression, LConstraint, - patch_optsolver_free_model_before_solving, patch_optsolver_record_memusage_before_solving, empty_network, free_pyomo_initializers) from .descriptors import (get_switchable_as_dense, get_switchable_as_iter, diff --git a/pypsa/opt.py b/pypsa/opt.py index 0951c8349..3d692e324 100644 --- a/pypsa/opt.py +++ b/pypsa/opt.py @@ -52,6 +52,10 @@ __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), GNU GPL 3" +# ============================================================================= +# Tools for solving with pyomo +# ============================================================================= + class LExpression(object): """Affine expression of optimisation variables. @@ -382,3 +386,11 @@ def wrapper(): except ImportError: logger.debug("Unable to measure memory usage, since the resource library is missing") return False + + +# ============================================================================= +# Helpers for opf_lowmemory +# ============================================================================= + + + diff --git a/pypsa/solve.py b/pypsa/solve.py deleted file mode 100644 index 4995db817..000000000 --- a/pypsa/solve.py +++ /dev/null @@ -1,1323 +0,0 @@ -## Copyright 2019 Tom Brown (KIT), Fabian Hofmann (FIAS) - -## This program is free software; you can redistribute it and/or -## modify it under the terms of the GNU General Public License as -## published by the Free Software Foundation; either version 3 of the -## License, or (at your option) any later version. - -## This program is distributed in the hope that it will be useful, -## but WITHOUT ANY WARRANTY; without even the implied warranty of -## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -## GNU General Public License for more details. - -## You should have received a copy of the GNU General Public License -## along with this program. If not, see . - -"""nomopyomo: build optimisation problems from PyPSA networks without -Pyomo. 
nomopyomo = no more Pyomo.""" - -from .descriptors import get_switchable_as_dense as get_as_dense -from .pf import _as_snapshots - -import pandas as pd -from pandas import IndexSlice as idx -import numpy as np - -import gc, string, random, time, os, re, subprocess, io - -import logging -logger = logging.getLogger(__name__) - - -lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'), - index_col=['component', 'variable']) -nominals = lookup.query('nominal').reset_index(level='variable').variable - -# ============================================================================= -# writing functions -# ============================================================================= - -xCounter = 0 -cCounter = 0 -def reset_counter(): - global xCounter, cCounter - xCounter, cCounter = 0, 0 - - -def write_bound(n, lower, upper, axes=None): - """ - Writer function for writing out mutliple variables at a time. If lower and - upper are floats it demands to give pass axes, a tuple of (index, columns) - or (index), for creating the variable of same upper and lower bounds. - Return a series or frame with variable references. - """ - axes = [axes] if isinstance(axes, pd.Index) else axes - if axes is None: - axes, shape = broadcasted_axes(lower, upper) - else: - shape = tuple(map(len, axes)) - ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = np.prod(shape) - global xCounter - xCounter += length - variables = np.array([f'x{x}' for x in range(xCounter - length, xCounter)], - dtype=object).reshape(shape) - lower, upper = _str_array(lower), _str_array(upper) - for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten(): - n.bounds_f.write(s) - return ser_or_frame(variables, *axes) - -def write_constraint(n, lhs, sense, rhs, axes=None): - """ - Writer function for writing out mutliple constraints to the corresponding - constraints file. If lower and upper are numpy.ndarrays it axes must not be - None but a tuple of (index, columns) or (index). - Return a series or frame with constraint references. - """ - axes = [axes] if isinstance(axes, pd.Index) else axes - if axes is None: - axes, shape = broadcasted_axes(lhs, rhs) - else: - shape = tuple(map(len, axes)) - ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = np.prod(shape) - global cCounter - cCounter += length - cons = np.array([f'c{x}' for x in range(cCounter - length, cCounter)], - dtype=object).reshape(shape) - if isinstance(sense, str): - sense = '=' if sense == '==' else sense - lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) - for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten(): - n.constraints_f.write(c) - return ser_or_frame(cons, *axes) - - -# ============================================================================= -# helpers, helper functions -# ============================================================================= - -var_ref_suffix = '_varref' # after solving replace with '_opt' -con_ref_suffix = '_conref' # after solving replace with '' - -def broadcasted_axes(*dfs): - """ - Helper function which, from a collection of arrays, series, frames and other - values, retrieves the axes of series and frames which result from - broadcasting operations. It checks whether index and columns of given - series and frames, repespectively, are aligned. Using this function allows - to subsequently use pure numpy operations and keep the axes in the - background. 
- """ - axes = [] - shape = () - for df in dfs: - if isinstance(df, (pd.Series, pd.DataFrame)): - if len(axes): - assert (axes[-1] == df.axes[-1]).all(), ('Series or DataFrames ' - 'are not aligned') - axes = df.axes if len(df.axes) > len(axes) else axes - shape = tuple(map(len, axes)) - return axes, shape - - -def linexpr(*tuples, return_axes=False): - """ - Elementwise concatenation of tuples in the form (coefficient, variables). - Coefficient and variables can be arrays, series or frames. Returns - a np.ndarray of strings. If return_axes is set to True and a pd.Series or - pd.DataFrame was past, the corresponding index (and column if existent) is - returned additionaly. - - Parameters - ---------- - tulples: tuple of tuples - Each tuple must of the form (coeff, var), where - * coeff is a numerical value, or a numeical array, series, frame - * var is a str or a array, series, frame of variable strings - return_axes: Boolean, default False - Whether to return index and column (if existent) - - Example - ------- - >>> coeff1 = 1 - >>> var1 = pd.Series(['a1', 'a2', 'a3']) - >>> coeff2 = pd.Series([-0.5, -0.3, -1]) - >>> var2 = pd.Series(['b1', 'b2', 'b3']) - - >>> linexpr((coeff1, var1), (coeff2, var2)) - array(['+1.0 a1\n-0.5 b1\n', '+1.0 a2\n-0.3 b2\n', '+1.0 a3\n-1.0 b3\n'], - dtype=object) - - - For turning the result into a series or frame again: - >>> pd.Series(*linexpr((coeff1, var1), (coeff2, var2), return_axes=True)) - 0 +1.0 a1\n-0.5 b1\n - 1 +1.0 a2\n-0.3 b2\n - 2 +1.0 a3\n-1.0 b3\n - dtype: object - - This can also be applied to DataFrames, using - pd.DataFrame(*linexpr(..., return_axes=True)). - """ - axes, shape = broadcasted_axes(*sum(tuples, ())) - expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object) - if np.prod(shape): - for coeff, var in tuples: - expr += _str_array(coeff) + _str_array(var) + '\n' - if return_axes: - return (expr, *axes) - return expr - - -def _str_array(array): - if isinstance(array, (float, int)): - array = f'+{float(array)} ' if array >= 0 else f'{float(array)} ' - elif isinstance(array, (pd.Series, pd.DataFrame)): - array = array.values - if isinstance(array, np.ndarray): - if not (array.dtype == object) and array.size: - signs = pd.Series(array) if array.ndim == 1 else pd.DataFrame(array) - signs = (signs.pipe(np.sign) - .replace([0, 1, -1], ['+', '+', '-']).values) - array = signs + abs(array).astype(str) + ' ' - return array - - -def join_exprs(df): - """ - Helper function to join arrays, series or frames of stings together. - """ - return ''.join(np.asarray(df).flatten()) - -def expand_series(ser, columns): - """ - Helper function to fastly expand a series to a dataframe with according - column axis and every single column being the equal to the given series. - """ - return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1) - -# ============================================================================= -# 'getter' functions -# ============================================================================= - -def get_extendable_i(n, c): - """ - Getter function. Get the index of extendable elements of a given component. - """ - return n.df(c)[lambda ds: - ds[nominals[c] + '_extendable']].index - -def get_non_extendable_i(n, c): - """ - Getter function. Get the index of non-extendable elements of a given - component. 
- """ - return n.df(c)[lambda ds: - ~ds[nominals[c] + '_extendable']].index - -def get_bounds_pu(n, c, sns, index=slice(None), attr=None): - """ - Getter function to retrieve the per unit bounds of a given compoent for - given snapshots and possible subset of elements (e.g. non-extendables). - Depending on the attr you can further specify the bounds of the variable - you are looking at, e.g. p_store for storage units. - - Parameters - ---------- - n : pypsa.Network - c : string - Component name, e.g. "Generator", "Line". - sns : pandas.Index/pandas.DateTimeIndex - set of snapshots for the bounds - index : pd.Index, default None - Subset of the component elements. If None (default) bounds of all - elements are returned. - attr : string, default None - attribute name for the bounds, e.g. "p", "s", "p_store" - - """ - min_pu_str = nominals[c].replace('nom', 'min_pu') - max_pu_str = nominals[c].replace('nom', 'max_pu') - - max_pu = get_as_dense(n, c, max_pu_str, sns) - if c in n.passive_branch_components: - min_pu = - max_pu - elif c == 'StorageUnit': - min_pu = pd.DataFrame(0, max_pu.index, max_pu.columns) - if attr == 'p_store': - max_pu = - get_as_dense(n, c, min_pu_str, sns) - if attr == 'state_of_charge': - max_pu = expand_series(n.df(c).max_hours, sns).T - min_pu = pd.DataFrame(0, *max_pu.axes) - else: - min_pu = get_as_dense(n, c, min_pu_str, sns) - return min_pu[index], max_pu[index] - - -# ============================================================================= -# references to vars and cons, rewrite this part to not store every reference -# ============================================================================= - -def _add_reference(n, df, c, attr, suffix, pnl=True): - attr_name = attr + suffix - if pnl: - if attr_name in n.pnl(c): - n.pnl(c)[attr_name][df.columns] = df - else: - n.pnl(c)[attr_name] = df - if n.pnl(c)[attr_name].shape[1] == n.df(c).shape[0]: - n.pnl(c)[attr_name] = n.pnl(c)[attr_name].reindex(columns=n.df(c).index) - else: - n.df(c).loc[df.index, attr_name] = df - -def set_varref(n, variables, c, attr, pnl=True, spec=''): - """ - Sets variable references to the network. - If pnl is False it stores a series of variable names in the static - dataframe of the given component. The columns name is then given by the - attribute name attr and the globally define var_ref_suffix. - If pnl is True if stores the given frame of references in the component - dict of time-depending quantities, e.g. network.generators_t . - """ - if not variables.empty: - if ((c, attr) in n.variables.index) and (spec != ''): - n.variables.at[idx[c, attr], 'specification'] += ', ' + spec - else: - n.variables.loc[idx[c, attr], :] = [pnl, spec] - _add_reference(n, variables, c, attr, var_ref_suffix, pnl=pnl) - -def set_conref(n, constraints, c, attr, pnl=True, spec=''): - """ - Sets constraint references to the network. - If pnl is False it stores a series of constraints names in the static - dataframe of the given component. The columns name is then given by the - attribute name attr and the globally define con_ref_suffix. - If pnl is True if stores the given frame of references in the component - dict of time-depending quantities, e.g. network.generators_t . 
- """ - if not constraints.empty: - if ((c, attr) in n.constraints.index) and (spec != ''): - n.constraints.at[idx[c, attr], 'specification'] += ', ' + spec - else: - n.constraints.loc[idx[c, attr], :] = [pnl, spec] - _add_reference(n, constraints, c, attr, con_ref_suffix, pnl=pnl) - - -def get_var(n, c, attr, pop=False): - ''' - Retrieves variable references for a given static or time-depending - attribute of a given component. The function looks into n.variables to - detect whether the variable is a time-dependent or static. - - Parameters - ---------- - n : pypsa.Network - c : str - component name to which the constraint belongs - attr: str - attribute name of the constraints - - Example - ------- - get_var(n, 'Generator', 'p') - - ''' - if n.variables.at[idx[c, attr], 'pnl']: - if pop: - return n.pnl(c).pop(attr + var_ref_suffix) - return n.pnl(c)[attr + var_ref_suffix] - else: - if pop: - return n.df(c).pop(attr + var_ref_suffix) - return n.df(c)[attr + var_ref_suffix] - - -def get_con(n, c, attr, pop=False): - """ - Retrieves constraint references for a given static or time-depending - attribute of a give component. - - Parameters - ---------- - n : pypsa.Network - c : str - component name to which the constraint belongs - attr: str - attribute name of the constraints - - Example - ------- - get_con(n, 'Generator', 'mu_upper') - """ - if n.constraints.at[idx[c, attr], 'pnl']: - if pop: - return n.pnl(c).pop(attr + con_ref_suffix) - return n.pnl(c)[attr + con_ref_suffix] - else: - if pop: - return n.df(c).pop(attr + con_ref_suffix) - return n.df(c)[attr + con_ref_suffix] - - -# ============================================================================= -# solvers -# ============================================================================= - -def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, - solver_options, keep_files, warmstart=None, - store_basis=True): - #printingOptions is about what goes in solution file - command = f"cbc -printingOptions all -import {problem_fn} " - if warmstart: - command += f'-basisI {warmstart} ' - if (solver_options is not None) and (solver_options != {}): - command += solver_options - command += f"-solve -solu {solution_fn} " - if store_basis: - n.basis_fn = solution_fn.replace('.sol', '.bas') - command += f'-basisO {n.basis_fn} ' - - if solver_logfile is None: - os.system(command) - else: - result = subprocess.run(command.split(' '), stdout=subprocess.PIPE) - print(result.stdout.decode('utf-8'), file=open(solver_logfile, 'w')) - - f = open(solution_fn,"r") - data = f.readline() - f.close() - - if data.startswith("Optimal - objective value"): - status = "optimal" - termination_condition = status - objective = float(data[len("Optimal - objective value "):]) - elif "Infeasible" in data: - termination_condition = "infeasible" - else: - termination_condition = "other" - - if termination_condition != "optimal": - return status, termination_condition, None, None, None - - sol = pd.read_csv(solution_fn, header=None, skiprows=[0], - sep=r'\s+', usecols=[1,2,3], index_col=0) - variables_b = sol.index.str[0] == 'x' - variables_sol = sol[variables_b][2] - constraints_dual = sol[~variables_b][3] - - if not keep_files: - os.system("rm "+ problem_fn) - os.system("rm "+ solution_fn) - - return (status, termination_condition, variables_sol, - constraints_dual, objective) - - -def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, - solver_options, keep_files, warmstart=None, - store_basis=True): - # for solver_options lookup 
https://kam.mff.cuni.cz/~elias/glpk.pdf - command = (f"glpsol --lp {problem_fn} --output {solution_fn}") - if solver_logfile is not None: - command += f' --log {solver_logfile}' - if warmstart: - command += f' --ini {warmstart}' - if store_basis: - n.basis_fn = solution_fn.replace('.sol', '.bas') - command += f' -w {n.basis_fn}' - if (solver_options is not None) and (solver_options != {}): - command += solver_options - - os.system(command) - - data = open(solution_fn) - info = '' - linebreak = False - while not linebreak: - line = data.readline() - linebreak = line == '\n' - info += line - info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] - status = info.Status.lower().strip() - objective = float(re.sub('[^0-9]+', '', info.Objective)) - termination_condition = status - - if termination_condition != "optimal": - return status, termination_condition, None, None, None - - sol = pd.read_fwf(data).set_index('Row name') - variables_b = sol.index.str[0] == 'x' - variables_sol = sol[variables_b]['Activity'].astype(float) - sol = sol[~variables_b] - constraints_b = sol.index.str[0] == 'c' - constraints_dual = (pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') - .fillna(0)) - - if not keep_files: - os.system("rm "+ problem_fn) - os.system("rm "+ solution_fn) - - return (status, termination_condition, variables_sol, - constraints_dual, objective) - - -def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, - solver_options, keep_files, warmstart=None, - store_basis=True): - import gurobipy - # for solver options see - # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html - if (solver_logfile is not None) and (solver_options is not None): - solver_options["logfile"] = solver_logfile - - # disable logging for this part, as gurobi output is doubled otherwise - logging.disable(50) - m = gurobipy.read(problem_fn) - if solver_options is not None: - for key, value in solver_options.items(): - m.setParam(key, value) - if warmstart: - m.read(warmstart) - m.optimize() - logging.disable(1) - - if store_basis: - n.basis_fn = solution_fn.replace('.sol', '.bas') - try: - m.write(n.basis_fn) - except gurobipy.GurobiError: - logging.info('No model basis stored') - del n.basis_fn - - if not keep_files: - os.system("rm "+ problem_fn) - - Status = gurobipy.GRB.Status - statusmap = {getattr(Status, s) : s.lower() for s in Status.__dir__() - if not s.startswith('_')} - status = statusmap[m.status] - termination_condition = status - if termination_condition != "optimal": - return status, termination_condition, None, None, None - - variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()}) - constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()}) - termination_condition = status - objective = m.ObjVal - del m - return (status, termination_condition, variables_sol, - constraints_dual, objective) - - - -# ============================================================================= -# Setting up the problem -# ============================================================================= - -def define_nominal_for_extendable_variables(n, c, attr): - ext_i = get_extendable_i(n, c) - if ext_i.empty: return - lower = n.df(c)[attr+'_min'][ext_i] - upper = n.df(c)[attr+'_max'][ext_i] - variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr, pnl=False) - - -def define_dispatch_for_extendable_variables(n, sns, c, attr): - ext_i = get_extendable_i(n, c) - if ext_i.empty: return - variables = write_bound(n, -np.inf, np.inf, 
axes=[sns, ext_i]) - set_varref(n, variables, c, attr, pnl=True, spec='extendables') - - -def define_dispatch_for_non_extendable_variables(n, sns, c, attr): - fix_i = get_non_extendable_i(n, c) - if fix_i.empty: return - nominal_fix = n.df(c)[nominals.at[c]][fix_i] - min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) - lower = min_pu.mul(nominal_fix) - upper = max_pu.mul(nominal_fix) - variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr, pnl=True, spec='nonextendables') - - -def define_dispatch_for_extendable_constraints(n, sns, c, attr): - ext_i = get_extendable_i(n, c) - if ext_i.empty: return - min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr) - operational_ext_v = get_var(n, c, attr)[ext_i] - nominal_v = get_var(n, c, nominals.at[c])[ext_i] - rhs = 0 - - lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v), - return_axes=True) - constraints = write_constraint(n, lhs, '>=', rhs, axes) - set_conref(n, constraints, c, 'mu_upper', pnl=True, spec=attr) - - lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v), - return_axes=True) - constraints = write_constraint(n, lhs, '<=', rhs, axes) - set_conref(n, constraints, c, 'mu_lower', pnl=True, spec=attr) - - -def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True): - if pnl: - if attr + '_set' not in n.pnl(c): return - fix = n.pnl(c)[attr + '_set'].unstack().dropna() - if fix.empty: return - lhs = linexpr((1, get_var(n, c, attr).unstack()[fix.index])) - constraints = write_constraint(n, lhs, '=', fix).unstack().T - else: - if attr + '_set' not in n.df(c): return - fix = n.df(c)[attr + '_set'].dropna() - if fix.empty: return - lhs = linexpr((1, get_var(n, c, attr)[fix.index])) - constraints = write_constraint(n, lhs, '=', fix) - set_conref(n, constraints, c, f'mu_{attr}_set', pnl) - - -def define_ramp_limit_constraints(n, sns): - c = 'Generator' - rup_i = n.df(c).query('ramp_limit_up == ramp_limit_up').index - rdown_i = n.df(c).query('ramp_limit_down == ramp_limit_down').index - if rup_i.empty & rdown_i.empty: - return - p = get_var(n, c, 'p').loc[sns[1:]] - p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]] - - #fix up - gens_i = rup_i & get_non_extendable_i(n, c) - lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - return_axes=True)) - rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') - constraints = write_constraint(n, lhs, '<=', rhs) - set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='nonextendables') - - #ext up - gens_i = rup_i & get_extendable_i(n, c) - limit_pu = n.df(c)['ramp_limit_up'][gens_i] - p_nom = get_var(n, c, 'p_nom')[gens_i] - lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - (-limit_pu, p_nom), return_axes=True)) - constraints = write_constraint(n, lhs, '<=', 0) - set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='extendables') - - #fix down - gens_i = rdown_i & get_non_extendable_i(n, c) - lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - return_axes=True)) - rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') - constraints = write_constraint(n, lhs, '>=', rhs) - set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='nonextendables') - - #ext down - gens_i = rdown_i & get_extendable_i(n, c) - limit_pu = n.df(c)['ramp_limit_down'][gens_i] - p_nom = get_var(n, c, 'p_nom')[gens_i] - lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - (limit_pu, p_nom), return_axes=True)) - constraints = write_constraint(n, lhs, '>=', 0) - set_conref(n, constraints, c, 
'mu_ramp_limit_down', spec='extendables') - - -def define_nodal_balance_constraints(n, sns): - - def bus_injection(c, attr, groupcol='bus', sign=1): - #additional sign only necessary for branches in reverse direction - if 'sign' in n.df(c): - sign = sign * n.df(c).sign - vals = linexpr((sign, get_var(n, c, attr)), return_axes=True) - return pd.DataFrame(*vals).rename(columns=n.df(c)[groupcol]) - - # one might reduce this a bit by using n.branches and lookup - args = [['Generator', 'p'], ['Store', 'p'], ['StorageUnit', 'p_dispatch'], - ['StorageUnit', 'p_store', 'bus', -1], ['Line', 's', 'bus0', -1], - ['Line', 's', 'bus1', 1], ['Transformer', 's', 'bus0', -1], - ['Transformer', 's', 'bus1', 1], ['Link', 'p', 'bus0', -1], - ['Link', 'p', 'bus1', n.links.efficiency]] - args = [arg for arg in args if not n.df(arg[0]).empty] - - lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) - .groupby(axis=1, level=0) - .agg(lambda x: ''.join(x.values)) - .reindex(columns=n.buses.index)) - sense = '=' - rhs = ((- n.loads_t.p_set * n.loads.sign) - .groupby(n.loads.bus, axis=1).sum() - .reindex(columns=n.buses.index, fill_value=0)) - constraints = write_constraint(n, lhs, sense, rhs) - set_conref(n, constraints, 'Bus', 'nodal_balance') - - -def define_kirchhoff_constraints(n): - weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff) - - def cycle_flow(ds): - ds = ds[lambda ds: ds!=0.].dropna() - vals = linexpr((ds, get_var(n, 'Line', 's')[ds.index])) + '\n' - return vals.sum(1) - - constraints = [] - for sub in n.sub_networks.obj: - C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) - if C.empty: - continue - C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) - con = write_constraint(n, C_weighted.apply(cycle_flow), '=', 0) - constraints.append(con) - constraints = pd.concat(constraints, axis=1, ignore_index=True) - set_conref(n, constraints, 'Line', 'kirchhoff_voltage') - - -def define_storage_unit_constraints(n, sns): - sus_i = n.storage_units.index - if sus_i.empty: return - c = 'StorageUnit' - #spillage - upper = get_as_dense(n, c, 'inflow').loc[:, lambda df: df.max() > 0] - spill = write_bound(n, 0, upper) - set_varref(n, spill, 'StorageUnit', 'spill') - - #soc constraint previous_soc + p_store - p_dispatch + inflow - spill == soc - eh = expand_series(n.snapshot_weightings, sus_i) #elapsed hours - - eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) - eff_dispatch = expand_series(n.df(c).efficiency_dispatch, sns).T - eff_store = expand_series(n.df(c).efficiency_store, sns).T - - soc = get_var(n, c, 'state_of_charge') - cyclic_i = n.df(c).query('cyclic_state_of_charge').index - noncyclic_i = n.df(c).query('~cyclic_state_of_charge').index - - prev_soc_cyclic = soc.shift().fillna(soc.loc[sns[-1]]) - - coeff_var = [(-1, soc), - (-1/eff_dispatch * eh, get_var(n, c, 'p_dispatch')), - (eff_store * eh, get_var(n, c, 'p_store'))] - lhs, *axes = linexpr(*coeff_var, return_axes=True) - - def masked_term(coeff, var, cols): - return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ - .reindex(index=axes[0], columns=axes[1], fill_value='').values - - lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns) - lhs += masked_term(eff_stand, prev_soc_cyclic, cyclic_i) - lhs += masked_term(eff_stand.loc[sns[1:]], soc.shift().loc[sns[1:]], noncyclic_i) - - rhs = -get_as_dense(n, c, 'inflow').mul(eh) - rhs.loc[sns[0], noncyclic_i] -= n.df(c).state_of_charge_initial[noncyclic_i] - - constraints = write_constraint(n, lhs, '==', rhs) - 
set_conref(n, constraints, c, 'soc') - - -def define_store_constraints(n, sns): - stores_i = n.stores.index - if stores_i.empty: return - c = 'Store' - variables = write_bound(n, -np.inf, np.inf, axes=[sns, stores_i]) - set_varref(n, variables, c, 'p') - - #previous_e - p == e - eh = expand_series(n.snapshot_weightings, stores_i) #elapsed hours - eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) - - e = get_var(n, c, 'e') - cyclic_i = n.df(c).query('e_cyclic').index - noncyclic_i = n.df(c).query('~e_cyclic').index - - previous_e_cyclic = e.shift().fillna(e.loc[sns[-1]]) - - coeff_var = [(-eh, get_var(n, c, 'p')), (-1, e)] - - lhs, *axes = linexpr(*coeff_var, return_axes=True) - - def masked_term(coeff, var, cols): - return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\ - .reindex(index=axes[0], columns=axes[1], fill_value='').values - - lhs += masked_term(eff_stand, previous_e_cyclic, cyclic_i) - lhs += masked_term(eff_stand.loc[sns[1:]], e.shift().loc[sns[1:]], noncyclic_i) - - rhs = pd.DataFrame(0, sns, stores_i) - rhs.loc[sns[0], noncyclic_i] -= n.df(c)['e_initial'][noncyclic_i] - - constraints = write_constraint(n, lhs, '==', rhs) - set_conref(n, constraints, c, 'soc') - - -def define_global_constraints(n, sns): - glcs = n.global_constraints.query('type == "primary_energy"') - for name, glc in glcs.iterrows(): - carattr = glc.carrier_attribute - emissions = n.carriers.query(f'{carattr} != 0')[carattr] - if emissions.empty: continue - gens = n.generators.query('carrier in @emissions.index') - em_pu = gens.carrier.map(emissions)/gens.efficiency - em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T - vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) - lhs = join_exprs(vals) - rhs = glc.constant - - #storage units - sus = n.storage_units.query('carrier in @emissions.index and ' - 'not cyclic_state_of_charge') - sus_i = sus.index - if not sus.empty: - vals = linexpr((-sus.carrier.map(emissions), - get_var(n, 'StorageUnit', 'state_of_charge').loc[sns[-1], sus_i])) - lhs = lhs + '\n' + join_exprs(vals) - rhs -= sus.carrier.map(emissions) @ sus.state_of_charge_initial - - #stores - n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) - stores = n.stores.query('carrier in @emissions.index and not e_cyclic') - if not stores.empty: - vals = linexpr((-stores.carrier.map(n.emissions), - get_var(n, 'Store', 'e').loc[sns[-1], stores.index])) - lhs = lhs + '\n' + join_exprs(vals) - rhs -= stores.carrier.map(emissions) @ stores.state_of_charge_initial - - - con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, name) - - #expansion limits - glcs = n.global_constraints.query('type == ' - '"transmission_volume_expansion_limit"') - substr = lambda s: re.sub('[\[\]\(\)]', '', s) - for name, glc in glcs.iterrows(): - carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')] - lines_ext_i = n.lines.query(f'carrier in @carattr ' - 'and s_nom_extendable').index - links_ext_i = n.links.query(f'carrier in @carattr ' - 'and p_nom_extendable').index - linevars = linexpr((n.lines.length[lines_ext_i], - get_var(n, 'Line', 's_nom')[lines_ext_i])) - linkvars = linexpr((n.links.length[links_ext_i], - get_var(n, 'Link', 'p_nom')[links_ext_i])) - lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) - sense = glc.sense - rhs = glc.constant - con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, 
name) - - #expansion cost limits - glcs = n.global_constraints.query('type == ' - '"transmission_expansion_cost_limit"') - for name, glc in glcs.iterrows(): - carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')] - lines_ext_i = n.lines.query(f'carrier in @carattr ' - 'and s_nom_extendable').index - links_ext_i = n.links.query(f'carrier in @carattr ' - 'and p_nom_extendable').index - linevars = linexpr((n.lines.capital_cost[lines_ext_i], - get_var(n, 'Line', 's_nom')[lines_ext_i])) - linkvars = linexpr((n.links.capital_cost[links_ext_i], - get_var(n, 'Link', 'p_nom')[links_ext_i])) - lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars) - sense = glc.sense - rhs = glc.constant - con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, name) - - -def define_objective(n): - for c, attr in lookup.query('marginal_cost').index: - cost = (get_as_dense(n, c, 'marginal_cost') - .loc[:, lambda ds: (ds != 0).all()] - .mul(n.snapshot_weightings, axis=0)) - if cost.empty: continue - terms = linexpr((cost, get_var(n, c, attr)[cost.columns])) - for t in terms.flatten(): - n.objective_f.write(t) - #investment - for c, attr in nominals.items(): - cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] - if cost.empty: continue - terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n' - for t in terms.flatten(): - n.objective_f.write(t) - - - -def prepare_lopf(n, snapshots=None, keep_files=False, - extra_functionality=None): - reset_counter() - - #used in kirchhoff and globals - n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) - - cols = ['component', 'name', 'pnl', 'specification'] - n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) - n.constraints = pd.DataFrame(columns=cols).set_index(cols[:2]) - - snapshots = n.snapshots if snapshots is None else snapshots - start = time.time() - def time_info(message): - logger.info(f'{message} {round(time.time()-start, 2)}s') - - n.identifier = ''.join(random.choice(string.ascii_lowercase) - for i in range(8)) - objective_fn = f"/tmp/objective-{n.identifier}.txt" - constraints_fn = f"/tmp/constraints-{n.identifier}.txt" - bounds_fn = f"/tmp/bounds-{n.identifier}.txt" - n.problem_fn = f"/tmp/test-{n.identifier}.lp" - - n.objective_f = open(objective_fn, mode='w') - n.constraints_f = open(constraints_fn, mode='w') - n.bounds_f = open(bounds_fn, mode='w') - - n.objective_f.write('\* LOPF *\n\nmin\nobj:\n') - n.constraints_f.write("\n\ns.t.\n\n") - n.bounds_f.write("\nbounds\n") - - - for c, attr in lookup.query('nominal and not handle_separately').index: - define_nominal_for_extendable_variables(n, c, attr) - define_fixed_variariable_constraints(n, snapshots, c, attr, pnl=False) - for c, attr in lookup.query('not nominal and not handle_separately').index: - define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) - define_dispatch_for_extendable_variables(n, snapshots, c, attr) - define_dispatch_for_extendable_constraints(n, snapshots, c, attr) - define_fixed_variariable_constraints(n, snapshots, c, attr) - - define_ramp_limit_constraints(n, snapshots) - define_storage_unit_constraints(n, snapshots) - define_store_constraints(n, snapshots) - define_kirchhoff_constraints(n) - define_nodal_balance_constraints(n, snapshots) - define_global_constraints(n, snapshots) - define_objective(n) - - if extra_functionality is not None: - extra_functionality(n, snapshots) - - n.objective_f.close() - n.constraints_f.close() - n.bounds_f.write("end\n") - 
n.bounds_f.close() - - del n.objective_f - del n.constraints_f - del n.bounds_f - - os.system(f"cat {objective_fn} {constraints_fn} {bounds_fn} " - f"> {n.problem_fn}") - - time_info('Total preparation time:') - - if not keep_files: - for fn in [objective_fn, constraints_fn, bounds_fn]: - os.system("rm "+ fn) - - -def assign_solution(n, sns, variables_sol, constraints_dual, - extra_postprocessing, keep_references=False): - pop = not keep_references - #solutions - def map_solution(c, attr, pnl): - if pnl: - variables = get_var(n, c, attr, pop=pop) - if variables.empty: return - values = variables.stack().map(variables_sol).unstack() - if c in n.passive_branch_components: - n.pnl(c)['p0'] = values - n.pnl(c)['p1'] = - values - elif c == 'Link': - n.pnl(c)['p0'] = values - n.pnl(c)['p1'] = - values * n.df(c).efficiency - else: - n.pnl(c)[attr] = values - elif not get_extendable_i(n, c).empty: - n.df(c)[attr+'_opt'] = get_var(n, c, attr, pop=pop)\ - .map(variables_sol).fillna(n.df(c)[attr]) - else: - n.df(c)[attr+'_opt'] = n.df(c)[attr] - - for (c, attr), pnl in n.variables.pnl.items(): - map_solution(c, attr, pnl) - - if not n.df('StorageUnit').empty: - c = 'StorageUnit' - n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store'] - - #duals - def map_dual(c, attr, pnl): - if pnl: - n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() - .map(-constraints_dual).unstack()) - else: - n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(-constraints_dual) - - for (c, attr), pnl in n.constraints.pnl.items(): - map_dual(c, attr, pnl) - - #load - n.loads_t.p = n.loads_t.p_set - - #injection, why does it include injection in hvdc 'network' - ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), - ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), - ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')] - sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1 #sign for 'Link' - n.buses_t.p = pd.concat( - [n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group]) - for c, attr, group in ca], axis=1).groupby(level=0, axis=1).sum() - - def v_ang_for_(sub): - buses_i = sub.buses_o - if len(buses_i) == 1: return - sub.calculate_B_H(skip_pre=True) - if len(sub.buses_i()) == 1: return - Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i) - Z -= Z[sub.slack_bus] - return n.buses_t.p[buses_i] @ Z - n.buses_t.v_ang = (pd.concat( - [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) - .reindex(columns=n.buses.index, fill_value=0)) - - - - -def network_lopf(n, snapshots=None, solver_name="cbc", - solver_logfile=None, extra_functionality=None, - extra_postprocessing=None, formulation="kirchhoff", - keep_references=False, keep_files=False, solver_options={}, - warmstart=False, store_basis=True): - """ - Linear optimal power flow for a group of snapshots. - - Parameters - ---------- - snapshots : list or index slice - A list of snapshots to optimise, must be a subset of - network.snapshots, defaults to network.snapshots - solver_name : string - Must be a solver name that pyomo recognises and that is - installed, e.g. "glpk", "gurobi" - skip_pre : bool, default False - Skip the preliminary steps of computing topology, calculating - dependent values and finding bus controls. - extra_functionality : callable function - This function must take two arguments - `extra_functionality(network,snapshots)` and is called after - the model building is complete, but before it is sent to the - solver. It allows the user to - add/change constraints and add/change the objective function. 
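
For illustration: with this pyomo-less backend, such an extra_functionality callback has to build constraints from the low-level helpers used throughout this file. A minimal sketch (the 10000 MW bound and the constraint name are made up; the helper signatures follow their uses elsewhere in this patch series):

    def cap_extendable_gens(n, snapshots):
        # cap the summed nominal capacity of all extendable generators
        gens = get_extendable_i(n, 'Generator')
        terms = linexpr((1, get_var(n, 'Generator', 'p_nom')[gens]))
        lhs = join_exprs(terms)
        con = write_constraint(n, lhs, '<=', 10000, axes=pd.Index(['gen_cap']))
        set_conref(n, con, 'GlobalConstraint', 'mu', False, 'gen_cap')

    n.lopf(pyomo=False, extra_functionality=cap_extendable_gens)
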
- solver_logfile : None|string - If not None, sets the logfile option of the solver. - solver_options : dictionary - A dictionary with additional options that get passed to the solver. - (e.g. {'threads':2} tells gurobi to use only 2 cpus) - keep_files : bool, default False - Keep the files that pyomo constructs from OPF problem - construction, e.g. .lp file - useful for debugging - formulation : string - Formulation of the linear power flow equations to use; only "kirchhoff" - is currently supported - extra_postprocessing : callable function - This function must take three arguments - `extra_postprocessing(network,snapshots,duals)` and is called after - the model has solved and the results are extracted. It allows the user to - extract further information about the solution, such as additional - shadow prices. - - Returns - ------- - None - """ - supported_solvers = ["cbc", "gurobi", 'glpk', 'scs'] - if solver_name not in supported_solvers: - raise NotImplementedError(f"Solver {solver_name} not in " - f"supported solvers: {supported_solvers}") - - if formulation != "kirchhoff": - raise NotImplementedError("Only the kirchhoff formulation is supported") - - #disable logging because multiple slack bus calculations, keep output clean - snapshots = _as_snapshots(n, snapshots) - n.calculate_dependent_values() - n.determine_network_topology() - - if solver_logfile is None: - solver_logfile = "test.log" - - logger.info("Prepare linear problem") - prepare_lopf(n, snapshots, keep_files, extra_functionality) - gc.collect() - solution_fn = "/tmp/test-{}.sol".format(n.identifier) - - if warmstart == True: - warmstart = n.basis_fn - logger.info("Solve linear problem using warmstart") - else: - logger.info("Solve linear problem") - - solve = eval(f'run_and_read_{solver_name}') - res = solve(n, n.problem_fn, solution_fn, solver_logfile, - solver_options, keep_files, warmstart, store_basis) - status, termination_condition, variables_sol, constraints_dual, obj = res - del n.problem_fn - - if termination_condition != "optimal": - return status,termination_condition - - #adjust objective value - for c, attr in nominals.items(): - obj -= n.df(c)[attr] @ n.df(c).capital_cost - n.objective = obj - gc.collect() - assign_solution(n, snapshots, variables_sol, constraints_dual, - extra_postprocessing, keep_references=keep_references) - gc.collect() - - return status,termination_condition - - -def ilopf(n, snapshots=None, msq_threshold=0.05, min_iterations=1, - max_iterations=100, **kwargs): - ''' - Iterative linear optimization updating the line parameters for passive - AC and DC lines. This is helpful when line expansion is enabled. After each - sucessful solving, line impedances and line resistance are recalculated - based on the optimization result. If warmstart is possible, it uses the - result from the previous iteration to fasten the optimization. - - Parameters - ---------- - snapshots : list or index slice - A list of snapshots to optimise, must be a subset of - network.snapshots, defaults to network.snapshots - msq_threshold: float, default 0.05 - Maximal mean square difference between optimized line capacity of - the current and the previous iteration. 
As soon as this threshold is - undercut, and the number of iterations is bigger than 'min_iterations' - the iterative optimization stops - min_iterations : integer, default 1 - Minimal number of iteration to run regardless whether the msq_threshold - is already undercut - max_iterations : integer, default 100 - Maximal numbder of iterations to run regardless whether msq_threshold - is already undercut - **kwargs - Keyword arguments of the lopf function which runs at each iteration - - ''' - - ext_i = get_extendable_i(n, 'Line') - typed_i = n.lines.query('type != ""').index - ext_untyped_i = ext_i.difference(typed_i) - ext_typed_i = ext_i & typed_i - base_s_nom = (np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) * - n.lines.bus0.map(n.buses.v_nom)) - n.lines.loc[ext_typed_i, 'num_parallel'] = (n.lines.s_nom/base_s_nom)[ext_typed_i] - - def update_line_params(n, s_nom_prev): - factor = n.lines.s_nom_opt / s_nom_prev - for attr, carrier in (('x', 'AC'), ('r', 'DC')): - ln_i = (n.lines.query('carrier == @carrier').index & ext_untyped_i) - n.lines.loc[ln_i, attr] /= factor[ln_i] - ln_i = ext_i & typed_i - n.lines.loc[ln_i, 'num_parallel'] = (n.lines.s_nom_opt/base_s_nom)[ln_i] - - def msq_diff(n, s_nom_prev): - lines_err = np.sqrt((s_nom_prev - n.lines.s_nom_opt).pow(2).mean()) / \ - n.lines['s_nom_opt'].mean() - logger.info(f"Mean square difference after iteration {iteration} is " - f"{lines_err}") - return lines_err - - iteration = 0 - diff = msq_threshold - while diff >= msq_threshold or iteration < min_iterations: - if iteration >= max_iterations: - logger.info(f'Iteration {iteration} beyond max_iterations ' - f'{max_iterations}. Stopping ...') - break - - s_nom_prev = n.lines.s_nom_opt if iteration else n.lines.s_nom - kwargs['warmstart'] = bool(iteration and ('basis_fn' in n.__dir__())) - network_lopf(n, snapshots, **kwargs) - update_line_params(n, s_nom_prev) - diff = msq_diff(n, s_nom_prev) - iteration += 1 - - - -# ============================================================================= -# test/double-check constraints -# ============================================================================= - - -def describe_storage_unit_contraints(n): - """ - Checks whether all storage units are balanced over time. This function - requires the network to contain the separate variables p_store and - p_dispatch, since they cannot be reconstructed from p. The latter results - from times tau where p_store(tau) > 0 **and** p_dispatch(tau) > 0, which - is allowed (even though not economic). Therefor p_store is necessarily - equal to negative entries of p, vice versa for p_dispatch. 
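
For reference, assign_solution above recovers p from the two separate series:

    n.pnl('StorageUnit')['p'] = (n.pnl('StorageUnit')['p_dispatch']
                                 - n.pnl('StorageUnit')['p_store'])
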
- """ - sus = n.storage_units - sus_i = sus.index - if sus_i.empty: return - sns = n.snapshots - c = 'StorageUnit' - pnl = n.pnl(c) - - description = {} - - eh = expand_series(n.snapshot_weightings, sus_i) - stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) - dispatch_eff = expand_series(n.df(c).efficiency_dispatch, sns).T - store_eff = expand_series(n.df(c).efficiency_store, sns).T - inflow = get_as_dense(n, c, 'inflow') * eh - spill = eh[pnl.spill.columns] * pnl.spill - - description['Spillage Limit'] = pd.Series({'min': - (inflow[spill.columns] - spill).min().min()}) - - if 'p_store' in pnl: - soc = pnl.state_of_charge - - store = store_eff * eh * pnl.p_store#.clip(upper=0) - dispatch = 1/dispatch_eff * eh * pnl.p_dispatch#(lower=0) - start = soc.iloc[-1].where(sus.cyclic_state_of_charge, - sus.state_of_charge_initial) - previous_soc = stand_eff * soc.shift().fillna(start) - - - reconstructed = (previous_soc.add(store, fill_value=0) - .add(inflow, fill_value=0) - .add(-dispatch, fill_value=0) - .add(-spill, fill_value=0)) - description['SOC Balance StorageUnit'] = ((reconstructed - soc) - .unstack().describe()) - else: - logging.info('Storage Unit SOC balance not reconstructable as no ' - 'p_store and p_dispatch in n.storage_units_t.') - return pd.concat(description, axis=1, sort=False) - - -def describe_nodal_balance_constraint(n): - """ - Helper function to double check whether network flow is balanced - """ - network_injection = pd.concat( - [n.pnl(c)[f'p{inout}'].rename(columns=n.df(c)[f'bus{inout}']) - for inout in (0, 1) for c in ('Line', 'Transformer')], axis=1)\ - .groupby(level=0, axis=1).sum() - return (n.buses_t.p - network_injection).unstack().describe()\ - .to_frame('Nodal Balance Constr.') - -def describe_upper_dispatch_constraints(n): - ''' - Recalculates the minimum gap between operational status and nominal capacity - ''' - description = {} - key = ' Upper Limit' - for c, attr in nominals.items(): - dispatch_attr = 'p0' if c in ['Line', 'Transformer', 'Link'] else attr[0] - description[c + key] = pd.Series({'min': - (n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_max_pu') - - n.pnl(c)[dispatch_attr]).min().min()}) - return pd.concat(description, axis=1) - - -def describe_lower_dispatch_constraints(n): - description = {} - key = ' Lower Limit' - for c, attr in nominals.items(): - if c in ['Line', 'Transformer', 'Link']: - dispatch_attr = 'p0' - description[c] = pd.Series({'min': - (n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_max_pu') + - n.pnl(c)[dispatch_attr]).min().min()}) - else: - dispatch_attr = attr[0] - description[c + key] = pd.Series({'min': - (-n.df(c)[attr + '_opt'] * - get_as_dense(n, c, attr[0] + '_min_pu') + - n.pnl(c)[dispatch_attr]).min().min()}) - return pd.concat(description, axis=1) - - -def describe_store_contraints(n): - """ - Checks whether all stores are balanced over time. 
-    """
-    stores = n.stores
-    stores_i = stores.index
-    if stores_i.empty: return
-    sns = n.snapshots
-    c = 'Store'
-    pnl = n.pnl(c)
-
-    eh = expand_series(n.snapshot_weightings, stores_i)
-    stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
-
-    start = pnl.e.iloc[-1].where(stores.e_cyclic, stores.e_initial)
-    previous_e = stand_eff * pnl.e.shift().fillna(start)
-
-    return (previous_e - pnl.p - pnl.e).unstack().describe()\
-            .to_frame('SOC Balance Store')
-
-
-def describe_cycle_constraints(n):
-    weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff)
-
-    def cycle_flow(sub):
-        C = pd.DataFrame(sub.C.todense(), index=sub.lines_i())
-        if C.empty:
-            return None
-        C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0)
-        return C_weighted.apply(lambda ds: ds @ n.lines_t.p0[ds.index].T)
-
-    return pd.concat([cycle_flow(sub) for sub in n.sub_networks.obj], axis=0)\
-            .unstack().describe().to_frame('Cycle Constr.')
-
-
-
-def constraint_stats(n, round_digit=1e-30):
-    """
-    Post-optimization function to recalculate gap statistics of different
-    constraints. For inequality constraints only the minimum of lhs - rhs, with
-    lhs >= rhs is returned.
-    """
-    return pd.concat([describe_cycle_constraints(n),
-                      describe_store_contraints(n),
-                      describe_storage_unit_contraints(n),
-                      describe_nodal_balance_constraint(n),
-                      describe_lower_dispatch_constraints(n),
-                      describe_upper_dispatch_constraints(n)],
-                     axis=1, sort=False)
-
-def check_constraints(n, tol=1e-3):
-    """
-    Post-optimization test function to double-check most of the lopf
-    constraints. For relevant equaility constraints, it test whether the
-    deviation between lhs and rhs is below the given tolerance. For inequality
-    constraints, it test whether the inequality is violated with a higher
-    value then the tolerance.
-
-    Parameters
-    ----------
-    n : pypsa.Network
-    tol : float
-        Gap tolerance
-
-    Returns AssertionError if tolerance is exceeded.
-
-    """
-    stats = constraint_stats(n).rename(index=str.title)
-    condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T
-    assert condition.empty, (f'The following constraint(s) are exceeding the '
-                             f'given tolerance of {tol}: \n{condition}')
-
-
-
-
diff --git a/pypsa/stats.py b/pypsa/stats.py
new file mode 100644
index 000000000..d08621fbd
--- /dev/null
+++ b/pypsa/stats.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Post-solving statistics of a network. This module contains functions to analyse
+an optimized network. Basic information on the network can be summarized and
+constraint gaps can be double-checked.
+"""
+
+from .descriptors import (expand_series, get_switchable_as_dense as get_as_dense,
+                          nominal_attrs)
+import pandas as pd
+import logging
+
+
+#Place summarize functions of pypsa-eur here
+
+
+def describe_storage_unit_contraints(n):
+    """
+    Checks whether all storage units are balanced over time. This function
+    requires the network to contain the separate variables p_store and
+    p_dispatch, since they cannot be reconstructed from p. The latter results
+    from times tau where p_store(tau) > 0 **and** p_dispatch(tau) > 0, which
+    is allowed (even though not economic). Therefore p_store is necessarily
+    equal to negative entries of p, vice versa for p_dispatch. 
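
In symbols, the storage balance reconstructed in the body below is (a sketch in LaTeX notation; $w_t$ denotes the snapshot weighting, $\lambda$ the standing loss):

    soc_t = (1-\lambda)^{w_t}\, soc_{t-1} + w_t \eta_{store}\, p_{store,t}
            - \frac{w_t}{\eta_{dispatch}}\, p_{dispatch,t}
            + w_t\, \mathrm{inflow}_t - w_t\, \mathrm{spill}_t
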
+ """ + sus = n.storage_units + sus_i = sus.index + if sus_i.empty: return + sns = n.snapshots + c = 'StorageUnit' + pnl = n.pnl(c) + + description = {} + + eh = expand_series(n.snapshot_weightings, sus_i) + stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) + dispatch_eff = expand_series(n.df(c).efficiency_dispatch, sns).T + store_eff = expand_series(n.df(c).efficiency_store, sns).T + inflow = get_as_dense(n, c, 'inflow') * eh + spill = eh[pnl.spill.columns] * pnl.spill + + description['Spillage Limit'] = pd.Series({'min': + (inflow[spill.columns] - spill).min().min()}) + + if 'p_store' in pnl: + soc = pnl.state_of_charge + + store = store_eff * eh * pnl.p_store#.clip(upper=0) + dispatch = 1/dispatch_eff * eh * pnl.p_dispatch#(lower=0) + start = soc.iloc[-1].where(sus.cyclic_state_of_charge, + sus.state_of_charge_initial) + previous_soc = stand_eff * soc.shift().fillna(start) + + + reconstructed = (previous_soc.add(store, fill_value=0) + .add(inflow, fill_value=0) + .add(-dispatch, fill_value=0) + .add(-spill, fill_value=0)) + description['SOC Balance StorageUnit'] = ((reconstructed - soc) + .unstack().describe()) + else: + logging.info('Storage Unit SOC balance not reconstructable as no ' + 'p_store and p_dispatch in n.storage_units_t.') + return pd.concat(description, axis=1, sort=False) + + +def describe_nodal_balance_constraint(n): + """ + Helper function to double check whether network flow is balanced + """ + network_injection = pd.concat( + [n.pnl(c)[f'p{inout}'].rename(columns=n.df(c)[f'bus{inout}']) + for inout in (0, 1) for c in ('Line', 'Transformer')], axis=1)\ + .groupby(level=0, axis=1).sum() + return (n.buses_t.p - network_injection).unstack().describe()\ + .to_frame('Nodal Balance Constr.') + +def describe_upper_dispatch_constraints(n): + ''' + Recalculates the minimum gap between operational status and nominal capacity + ''' + description = {} + key = ' Upper Limit' + for c, attr in nominal_attrs.items(): + dispatch_attr = 'p0' if c in ['Line', 'Transformer', 'Link'] else attr[0] + description[c + key] = pd.Series({'min': + (n.df(c)[attr + '_opt'] * + get_as_dense(n, c, attr[0] + '_max_pu') - + n.pnl(c)[dispatch_attr]).min().min()}) + return pd.concat(description, axis=1) + + +def describe_lower_dispatch_constraints(n): + description = {} + key = ' Lower Limit' + for c, attr in nominal_attrs.items(): + if c in ['Line', 'Transformer', 'Link']: + dispatch_attr = 'p0' + description[c] = pd.Series({'min': + (n.df(c)[attr + '_opt'] * + get_as_dense(n, c, attr[0] + '_max_pu') + + n.pnl(c)[dispatch_attr]).min().min()}) + else: + dispatch_attr = attr[0] + description[c + key] = pd.Series({'min': + (-n.df(c)[attr + '_opt'] * + get_as_dense(n, c, attr[0] + '_min_pu') + + n.pnl(c)[dispatch_attr]).min().min()}) + return pd.concat(description, axis=1) + + +def describe_store_contraints(n): + """ + Checks whether all stores are balanced over time. 
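
The analogous balance for stores, checked by the function body below (same notation as above):

    e_t = (1-\lambda)^{w_t}\, e_{t-1} - w_t\, p_t
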
+    """
+    stores = n.stores
+    stores_i = stores.index
+    if stores_i.empty: return
+    sns = n.snapshots
+    c = 'Store'
+    pnl = n.pnl(c)
+
+    eh = expand_series(n.snapshot_weightings, stores_i)
+    stand_eff = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
+
+    start = pnl.e.iloc[-1].where(stores.e_cyclic, stores.e_initial)
+    previous_e = stand_eff * pnl.e.shift().fillna(start)
+
+    #scale dispatch by the elapsed hours eh, as in the store constraint
+    return (previous_e - eh * pnl.p - pnl.e).unstack().describe()\
+            .to_frame('SOC Balance Store')
+
+
+def describe_cycle_constraints(n):
+    weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff)
+
+    def cycle_flow(sub):
+        C = pd.DataFrame(sub.C.todense(), index=sub.lines_i())
+        if C.empty:
+            return None
+        C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0)
+        return C_weighted.apply(lambda ds: ds @ n.lines_t.p0[ds.index].T)
+
+    return pd.concat([cycle_flow(sub) for sub in n.sub_networks.obj], axis=0)\
+            .unstack().describe().to_frame('Cycle Constr.')
+
+
+
+def constraint_stats(n, round_digit=1e-30):
+    """
+    Post-optimization function to recalculate gap statistics of different
+    constraints. For inequality constraints only the minimum of lhs - rhs, with
+    lhs >= rhs, is returned.
+    """
+    return pd.concat([describe_cycle_constraints(n),
+                      describe_store_contraints(n),
+                      describe_storage_unit_contraints(n),
+                      describe_nodal_balance_constraint(n),
+                      describe_lower_dispatch_constraints(n),
+                      describe_upper_dispatch_constraints(n)],
+                     axis=1, sort=False)
+
+def check_constraints(n, tol=1e-3):
+    """
+    Post-optimization test function to double-check most of the lopf
+    constraints. For relevant equality constraints, it tests whether the
+    deviation between lhs and rhs is below the given tolerance. For inequality
+    constraints, it tests whether the inequality is violated by more than the
+    tolerance.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    tol : float
+        Gap tolerance
+
+    Raises an AssertionError if the tolerance is exceeded. 
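
A usage sketch (assuming a network n that has already been solved, here via the pyomo-less path; the solver name is arbitrary):

    from pypsa.stats import check_constraints
    n.lopf(solver_name='cbc', pyomo=False)
    check_constraints(n, tol=1e-3)  # raises an AssertionError on any violation
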
+ + """ + stats = constraint_stats(n).rename(index=str.title) + condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T + assert condition.empty, (f'The following constraint(s) are exceeding the ' + f'given tolerance of {tol}: \n{condition}') + + + + diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 07d14cbf6..d38100eb6 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -9,6 +9,7 @@ from numpy.testing import assert_array_almost_equal as equal +import sys def test_lopf(): @@ -41,14 +42,15 @@ def test_lopf(): equal(n.links_t.p0.loc[:,n.links.index], n_r.links_t.p0.loc[:,n.links.index],decimal=4) - n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) + if sys.version_info.major >= 3: + n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) - equal(n.generators_t.p.loc[:,n.generators.index], - n_r.generators_t.p.loc[:,n.generators.index],decimal=4) - equal(n.lines_t.p0.loc[:,n.lines.index], - n_r.lines_t.p0.loc[:,n.lines.index],decimal=4) - equal(n.links_t.p0.loc[:,n.links.index], - n_r.links_t.p0.loc[:,n.links.index],decimal=4) + equal(n.generators_t.p.loc[:,n.generators.index], + n_r.generators_t.p.loc[:,n.generators.index],decimal=4) + equal(n.lines_t.p0.loc[:,n.lines.index], + n_r.lines_t.p0.loc[:,n.lines.index],decimal=4) + equal(n.links_t.p0.loc[:,n.links.index], + n_r.links_t.p0.loc[:,n.links.index],decimal=4) diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index fd14336a9..9a518ef8e 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -5,7 +5,7 @@ import pandas as pd -from itertools import product +import sys import os @@ -25,13 +25,21 @@ def test_opf(pyomo=True): target_gen_p = pd.read_csv(target_path, index_col=0) #test results were generated with GLPK and other solvers may differ - for solver_name, pyomo in product(["cbc", "glpk"], [True, False]): - solver_name = "glpk" + for solver_name in ["cbc", "glpk"]: - n.lopf(solver_name=solver_name, pyomo=pyomo) + n.lopf(solver_name=solver_name, pyomo=True) equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, decimal=2) + if sys.version_info.major >= 3: + + for solver_name in ["cbc", "glpk"]: + + n.lopf(solver_name=solver_name, pyomo=False) + + equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, + decimal=2) + if __name__ == "__main__": test_opf() From 2ecee3efa349df0fdf49f68ef63b6ab43a8d3a1a Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 17 Oct 2019 13:12:45 +0200 Subject: [PATCH 014/111] linopf fix tiny bug --- pypsa/linopf.py | 4 ++-- pypsa/stats.py | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 30c1acfcd..ab9eb6dd5 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -58,7 +58,7 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): def define_dispatch_for_non_extendable_variables(n, sns, c, attr): fix_i = get_non_extendable_i(n, c) if fix_i.empty: return - nominal_fix = n.df(c)[nominal_attrs.at[c]][fix_i] + nominal_fix = n.df(c)[nominal_attrs[c]][fix_i] min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) lower = min_pu.mul(nominal_fix) upper = max_pu.mul(nominal_fix) @@ -71,7 +71,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr): if ext_i.empty: return min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr) operational_ext_v = get_var(n, c, attr)[ext_i] - nominal_v = get_var(n, c, nominal_attrs.at[c])[ext_i] + nominal_v = get_var(n, c, nominal_attrs[c])[ext_i] rhs = 0 lhs, 
*axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v),
diff --git a/pypsa/stats.py b/pypsa/stats.py
index d08621fbd..7869a22d8 100644
--- a/pypsa/stats.py
+++ b/pypsa/stats.py
@@ -11,8 +11,39 @@
 import pandas as pd
 import logging
 
+idx = pd.IndexSlice
 
-#Place summarize functions of pypsa-eur here
+
+# =============================================================================
+# Network summary
+# =============================================================================
+
+opt_name = {"Store": "e", "Line" : "s", "Transformer" : "s"}
+
+def calculate_costs(n):
+    raise NotImplementedError
+    mc = {}
+    for c in n.iterate_components():
+        if 'marginal_cost' in c.df:
+
+            mc[c] = c.df @ c.pnl['p']
+
+
+def calculate_curtailment(n):
+    max_pu = n.generators_t.p_max_pu
+    avail = (max_pu.multiply(n.generators.p_nom_opt.loc[max_pu.columns]).sum()
+             .groupby(n.generators.carrier).sum())
+    used = (n.generators_t.p[max_pu.columns].sum()
+            .groupby(n.generators.carrier).sum())
+    return (((avail - used)/avail)*100).round(3)
+
+
+# and others from pypsa-eur
+
+
+# =============================================================================
+# gap analysis
+# =============================================================================
 
 
 def describe_storage_unit_contraints(n):

From 0ff03aaac455d762f7f0ed7d0b047849ebc8ee69 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Thu, 17 Oct 2019 15:15:37 +0200
Subject: [PATCH 015/111] travis include python2.7 again

---
 .travis.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index ace811d36..b95580f0a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,8 +6,8 @@ sudo: false # Use container-based infrastructure
 matrix:
   include:
 
-    # - env:
-    #   - PYTHON_VERSION="2.7"
+    - env:
+      - PYTHON_VERSION="2.7"
 
     - env:
      - PYTHON_VERSION="3.6"
 
    - env:

From d6134ac36faee88a2e386f4562c706926a9fec92 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Thu, 17 Oct 2019 15:59:24 +0200
Subject: [PATCH 016/111] disable new code for python version < 3

---
 pypsa/__init__.py   | 11 +++++++++--
 pypsa/components.py |  7 +++++--
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/pypsa/__init__.py b/pypsa/__init__.py
index 8cfc9fc74..b3abda22c 100644
--- a/pypsa/__init__.py
+++ b/pypsa/__init__.py
@@ -26,8 +26,15 @@
 from __future__ import absolute_import
 
 from . import components, descriptors
-from . import (pf, opf, opt, plot, networkclustering, io, contingency, geo,
-               linopf, linopt, stats)
+from . import pf, opf, opt, plot, networkclustering, io, contingency, geo, stats
+
+import sys
+
+#do this as long as python 2.7 should be supported
+if sys.version_info.major >= 3:
+    from . 
import linopf, linopt + + from .components import Network, SubNetwork diff --git a/pypsa/components.py b/pypsa/components.py index adb46bc0e..00d031673 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -61,12 +61,15 @@ from .opf import network_lopf, network_opf -from .linopf import network_lopf as network_lopf_lowmem - from .plot import plot, iplot from .graph import graph, incidence_matrix, adjacency_matrix +import sys + +if sys.version_info.major >= 3: + from .linopf import network_lopf as network_lopf_lowmem + import logging logger = logging.getLogger(__name__) From 508a3bdfd3007b6603c063bcf947cedd6543c237 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 17 Oct 2019 16:38:39 +0200 Subject: [PATCH 017/111] travis: disable python2.7 again --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b95580f0a..ace811d36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,8 +6,8 @@ sudo: false # Use container-based infrastructure matrix: include: - - env: - - PYTHON_VERSION="2.7" + # - env: + # - PYTHON_VERSION="2.7" - env: - PYTHON_VERSION="3.6" - env: From 53ac07b8c8ffeb2c8a519e7fdcf09ffa8e8b50f9 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 18 Oct 2019 13:01:28 +0200 Subject: [PATCH 018/111] linopf: complete docstrings --- pypsa/linopf.py | 113 ++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 109 insertions(+), 4 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index ab9eb6dd5..24430cc36 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -40,6 +40,18 @@ index_col=['component', 'variable']) def define_nominal_for_extendable_variables(n, c, attr): + """ + Initializes variables for nominal capacities for a given component and a + given attribute. + + Parameters + ---------- + n : pypsa.Network + c : str + network component of which the nominal capacity should be defined + attr : str + name of the variable, e.g. 'p_nom' + """ ext_i = get_extendable_i(n, c) if ext_i.empty: return lower = n.df(c)[attr+'_min'][ext_i] @@ -49,6 +61,18 @@ def define_nominal_for_extendable_variables(n, c, attr): def define_dispatch_for_extendable_variables(n, sns, c, attr): + """ + Initializes variables for power dispatch for a given component and a + given attribute. + + Parameters + ---------- + n : pypsa.Network + c : str + name of the network component + attr : str + name of the attribute, e.g. 'p' + """ ext_i = get_extendable_i(n, c) if ext_i.empty: return variables = write_bound(n, -np.inf, np.inf, axes=[sns, ext_i]) @@ -56,6 +80,18 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): def define_dispatch_for_non_extendable_variables(n, sns, c, attr): + """ + Initializes variables for power dispatch for a given component and a + given attribute. + + Parameters + ---------- + n : pypsa.Network + c : str + name of the network component + attr : str + name of the attribute, e.g. 'p' + """ fix_i = get_non_extendable_i(n, c) if fix_i.empty: return nominal_fix = n.df(c)[nominal_attrs[c]][fix_i] @@ -67,6 +103,18 @@ def define_dispatch_for_non_extendable_variables(n, sns, c, attr): def define_dispatch_for_extendable_constraints(n, sns, c, attr): + """ + Sets power dispatch constraints for extendable devices for a given + component and a given attribute. + + Parameters + ---------- + n : pypsa.Network + c : str + name of the network component + attr : str + name of the attribute, e.g. 
'p'
+    """
     ext_i = get_extendable_i(n, c)
     if ext_i.empty: return
     min_pu, max_pu = get_bounds_pu(n, c, sns, ext_i, attr)
@@ -86,6 +134,22 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr):
 
 
 def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True):
+    """
+    Sets constraints for fixing variables of a given component and attribute
+    to the corresponding values in n.pnl(c)[attr + '_set'] if pnl is True, or
+    in n.df(c)[attr + '_set'] if pnl is False.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    c : str
+        name of the network component
+    attr : str
+        name of the attribute, e.g. 'p'
+    pnl : bool, default True
+        Whether the variable which should be fixed is time-dependent
+    """
+
     if pnl:
         if attr + '_set' not in n.pnl(c): return
         fix = n.pnl(c)[attr + '_set'].unstack().dropna()
@@ -102,6 +166,9 @@ def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True):
 
 
 def define_ramp_limit_constraints(n, sns):
+    """
+    Defines ramp limits for generators with valid ramp limits
+    """
     c = 'Generator'
     rup_i = n.df(c).query('ramp_limit_up == ramp_limit_up').index
     rdown_i = n.df(c).query('ramp_limit_down == ramp_limit_down').index
@@ -146,6 +213,9 @@ def define_ramp_limit_constraints(n, sns):
 
 
 def define_nodal_balance_constraints(n, sns):
+    """
+    Defines the nodal balance constraints.
+    """
 
     def bus_injection(c, attr, groupcol='bus', sign=1):
         #additional sign only necessary for branches in reverse direction
@@ -175,6 +245,9 @@ def bus_injection(c, attr, groupcol='bus', sign=1):
 
 
 def define_kirchhoff_constraints(n):
+    """
+    Defines Kirchhoff voltage constraints
+    """
     weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff)
 
    def cycle_flow(ds):
@@ -195,6 +268,12 @@ def cycle_flow(ds):
 
 
 def define_storage_unit_constraints(n, sns):
+    """
+    Defines state of charge (soc) constraints for storage units. In principle
+    the constraint states:
+
+    previous_soc + p_store - p_dispatch + inflow - spill == soc
+    """
     sus_i = n.storage_units.index
     if sus_i.empty: return
     c = 'StorageUnit'
@@ -203,7 +282,6 @@ def define_storage_unit_constraints(n, sns):
     spill = write_bound(n, 0, upper)
     set_varref(n, spill, 'StorageUnit', 'spill')
 
-    #soc constraint previous_soc + p_store - p_dispatch + inflow - spill == soc
     eh = expand_series(n.snapshot_weightings, sus_i) #elapsed hours
     eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
 
@@ -237,13 +315,17 @@ def masked_term(coeff, var, cols):
 
 
 def define_store_constraints(n, sns):
+    """
+    Defines energy balance constraints for stores. In principle this states:
+
+    previous_e - p == e
+    """
     stores_i = n.stores.index
     if stores_i.empty: return
     c = 'Store'
     variables = write_bound(n, -np.inf, np.inf, axes=[sns, stores_i])
     set_varref(n, variables, c, 'p')
 
-    #previous_e - p == e
     eh = expand_series(n.snapshot_weightings, stores_i) #elapsed hours
     eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh)
 
@@ -272,6 +354,19 @@ def masked_term(coeff, var, cols):
 
 
 def define_global_constraints(n, sns):
+    """
+    Defines global constraints for the optimization. Possible types are
+
+    1. primary_energy
+        Use this to constrain the byproducts of primary energy sources, such
+        as CO2
+    2. transmission_volume_expansion_limit
+        Use this to set a limit for line volume expansion. Possible carriers
+        are 'AC' and 'DC'
+    3. transmission_expansion_cost_limit
+        Use this to set a limit for line expansion costs. 
Possible carriers
+        are 'AC' and 'DC'
+    """
     glcs = n.global_constraints.query('type == "primary_energy"')
     for name, glc in glcs.iterrows():
         carattr = glc.carrier_attribute
@@ -348,6 +443,9 @@ def define_global_constraints(n, sns):
 
 
 def define_objective(n):
+    """
+    Defines and writes out the objective function
+    """
     for c, attr in lookup.query('marginal_cost').index:
         cost = (get_as_dense(n, c, 'marginal_cost')
                 .loc[:, lambda ds: (ds != 0).all()]
@@ -368,6 +466,10 @@ def define_objective(n):
 
 def prepare_lopf(n, snapshots=None, keep_files=False,
                  extra_functionality=None):
+    """
+    Sets up the linear problem and writes it out to an lp file, stored at
+    n.problem_fn
+    """
     reset_counter()
 
     #used in kirchhoff and globals
@@ -439,6 +541,10 @@ def time_info(message):
 
 def assign_solution(n, sns, variables_sol, constraints_dual,
                     extra_postprocessing, keep_references=False):
+    """
+    Helper function. Assigns the solution of a successful optimization to the
+    network.
+    """
     pop = not keep_references
     #solutions
     def map_solution(c, attr, pnl):
@@ -481,7 +587,7 @@ def map_dual(c, attr, pnl):
     #load
     n.loads_t.p = n.loads_t.p_set
 
-    #injection, why does it include injection in hvdc 'network'
+    #injection, why does it 'exclude' injection in hvdc 'network'?
     ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'),
           ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'),
           ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')]
@@ -663,7 +769,6 @@ def msq_diff(n, s_nom_prev):
 
         s_nom_prev = n.lines.s_nom_opt if iteration else n.lines.s_nom
         kwargs['warmstart'] = bool(iteration and ('basis_fn' in n.__dir__()))
-#        import pdb; pdb.set_trace()
         network_lopf(n, snapshots, **kwargs)
         update_line_params(n, s_nom_prev)
         diff = msq_diff(n, s_nom_prev)

From aca70c6234f31e0123f4dc62058777b7c9c80cd0 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Fri, 18 Oct 2019 13:19:49 +0200
Subject: [PATCH 019/111] linopt: add docstrings for run_and_read_

---
 pypsa/linopt.py | 26 +++++++++++++++++++++++---
 1 file changed, 23 insertions(+), 3 deletions(-)

diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 62b63bc5b..66892aacb 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -288,6 +288,13 @@ def get_con(n, c, attr, pop=False):
 
 def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile,
                      solver_options, keep_files, warmstart=None,
                      store_basis=True):
+    """
+    Solving function. Reads the linear problem file and passes it to the cbc
+    solver. If the solution is successful it returns variable solutions and
+    constraint dual values.
+
+    For more information on the solver options, run 'cbc' in your shell
+    """
     #printingOptions is about what goes in solution file
     command = f"cbc -printingOptions all -import {problem_fn} "
     if warmstart:
@@ -338,7 +345,14 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile,
 
 def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile,
                       solver_options, keep_files, warmstart=None,
                       store_basis=True):
-    # for solver_options lookup https://kam.mff.cuni.cz/~elias/glpk.pdf
+    """
+    Solving function. Reads the linear problem file and passes it to the glpk
+    solver. If the solution is successful it returns variable solutions and
+    constraint dual values. 
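
Tying back to define_global_constraints above: a "primary_energy" cap enters the network as a regular GlobalConstraint component, e.g. (a sketch; name and limit are arbitrary):

    n.add("GlobalConstraint", "co2_cap", type="primary_energy",
          carrier_attribute="co2_emissions", sense="<=", constant=1e6)
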
+
+    For more information on the glpk solver options:
+    https://kam.mff.cuni.cz/~elias/glpk.pdf
+    """
     command = (f"glpsol --lp {problem_fn} --output {solution_fn}")
     if solver_logfile is not None:
         command += f' --log {solver_logfile}'
@@ -386,9 +400,15 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile,
 
 def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile,
                        solver_options, keep_files, warmstart=None,
                        store_basis=True):
+    """
+    Solving function. Reads the linear problem file and passes it to the gurobi
+    solver. If the solution is successful it returns variable solutions and
+    constraint dual values. Gurobipy must be installed to use this function.
+
+    For more information on solver options:
+    https://www.gurobi.com/documentation/{gurobi_version}/refman/parameter_descriptions.html
+    """
     import gurobipy
-    # for solver options see
-    # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html
 
     if (solver_logfile is not None) and (solver_options is not None):
         solver_options["logfile"] = solver_logfile

From 3093ebd8346499fb4eae368461108dd77084fd60 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Fri, 18 Oct 2019 15:16:57 +0200
Subject: [PATCH 020/111] linopf: cover case of lv limit but no extendable
 lines

---
 pypsa/linopf.py | 45 +++++++++++++++++++++++++--------------------
 pypsa/linopt.py |  3 ++-
 2 files changed, 27 insertions(+), 21 deletions(-)

diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index 24430cc36..94459c0e9 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -407,16 +407,14 @@ def define_global_constraints(n, sns):
                 '"transmission_volume_expansion_limit"')
     substr = lambda s: re.sub('[\[\]\(\)]', '', s)
     for name, glc in glcs.iterrows():
-        carattr = [substr(c.strip()) for c in glc.carrier_attribute.split(',')]
-        lines_ext_i = n.lines.query(f'carrier in @carattr '
-                                    'and s_nom_extendable').index
-        links_ext_i = n.links.query(f'carrier in @carattr '
-                                    'and p_nom_extendable').index
-        linevars = linexpr((n.lines.length[lines_ext_i],
-                            get_var(n, 'Line', 's_nom')[lines_ext_i]))
-        linkvars = linexpr((n.links.length[links_ext_i],
-                            get_var(n, 'Link', 'p_nom')[links_ext_i]))
-        lhs = join_exprs(linevars) + '\n' + join_exprs(linkvars)
+        car = [substr(c.strip()) for c in glc.carrier_attribute.split(',')]
+        lhs = ''
+        for c, attr in (('Line', 's_nom'), ('Link', 'p_nom')):
+            ext_i = n.df(c).query(f'carrier in @car and 
{attr}_extendable').index + if ext_i.empty: continue + v = linexpr((n.df(c).capital_cost[ext_i], get_var(n, c, attr)[ext_i])) + lhs += join_exprs(v) + '\n' + if lhs == '': continue sense = glc.sense rhs = glc.constant con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) @@ -773,4 +769,13 @@ def msq_diff(n, s_nom_prev): update_line_params(n, s_nom_prev) diff = msq_diff(n, s_nom_prev) iteration += 1 + logger.info('Running last lopf with fixed branches, overwrite p_nom ' + 'for links and s_nom for lines') + ext_links_i = get_extendable_i(n, 'Link') + n.lines[['s_nom', 's_nom_extendable']] = n.lines['s_nom_opt'], False + n.links[['p_nom', 'p_nom_extendable']] = n.links['p_nom_opt'], False + network_lopf(n, snapshots, **kwargs) + n.lines.loc[ext_i, 's_nom_extendable'] = True + n.links.loc[ext_links_i, 'p_nom_extendable'] = True + diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 66892aacb..9f82ac6be 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -18,6 +18,7 @@ import numpy as np from pandas import IndexSlice as idx +logger = logging.getLogger(__name__) # ============================================================================= # writing functions @@ -428,7 +429,7 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, try: m.write(n.basis_fn) except gurobipy.GurobiError: - logging.info('No model basis stored') + logger.info('No model basis stored') del n.basis_fn if not keep_files: From 3a636abc5e714892ac089fa37da7e842fe5b158d Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 22 Oct 2019 10:04:29 +0200 Subject: [PATCH 021/111] linopf: handle case of bus with no injection --- pypsa/linopf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 94459c0e9..b122b8220 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -599,7 +599,7 @@ def v_ang_for_(sub): if len(sub.buses_i()) == 1: return Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i) Z -= Z[sub.slack_bus] - return n.buses_t.p[buses_i] @ Z + return n.buses_t.p.reindex(columns=buses_i) @ Z n.buses_t.v_ang = (pd.concat( [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) .reindex(columns=n.buses.index, fill_value=0)) From eab33d2a384889c40c16f893404972827cdef541 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 24 Oct 2019 21:07:27 +0200 Subject: [PATCH 022/111] io: fix import export with new shadow prices --- pypsa/component_attrs/generators.csv | 5 +++++ pypsa/component_attrs/links.csv | 3 ++- pypsa/component_attrs/storage_units.csv | 5 +++++ pypsa/component_attrs/stores.csv | 5 ++++- pypsa/component_attrs/sub_networks.csv | 1 + pypsa/io.py | 13 ++++++------- pypsa/linopf.py | 18 +++++++++++------- 7 files changed, 34 insertions(+), 16 deletions(-) diff --git a/pypsa/component_attrs/generators.csv b/pypsa/component_attrs/generators.csv index aa806f9ab..f08d1cce9 100644 --- a/pypsa/component_attrs/generators.csv +++ b/pypsa/component_attrs/generators.csv @@ -30,3 +30,8 @@ p,series,MW,0.,active power at bus (positive if net generation),Output q,series,MVar,0.,reactive power (positive if net generation),Output p_nom_opt,float,MW,0.,Optimised nominal power.,Output status,series,n/a,1,"Status (1 is on, 0 is off). 
Only outputted if committable is True.",Output +mu_upper,series,currency/MWh,0.,Shadow price of upper p_nom limit,Output +mu_lower,series,currency/MWh,0.,Shadow price of lower p_nom limit,Output +mu_p_set,series,currency/MWh,0.,Shadow price of fixed power generation p_set,Output +mu_ramp_limit_up,series,currency/MWh,0.,Shadow price of upper ramp up limit,Output +mu_ramp_limit_down,series,currency/MWh,0.,Shadow price of lower ramp down limit,Output diff --git a/pypsa/component_attrs/links.csv b/pypsa/component_attrs/links.csv index a8cd131a8..719d234e9 100644 --- a/pypsa/component_attrs/links.csv +++ b/pypsa/component_attrs/links.csv @@ -19,4 +19,5 @@ p0,series,MW,0.,Active power at bus0 (positive if branch is withdrawing power fr p1,series,MW,0.,Active power at bus1 (positive if branch is withdrawing power from bus1).,Output p_nom_opt,float,MVA,0.,Optimised capacity for active power.,Output mu_lower,series,currency/MVA,0.,Shadow price of lower p_nom limit -F \leq f. Always non-negative.,Output -mu_upper,series,currency/MVA,0.,Shadow price of upper p_nom limit f \leq F. Always non-negative.,Output \ No newline at end of file +mu_upper,series,currency/MVA,0.,Shadow price of upper p_nom limit f \leq F. Always non-negative.,Output +mu_p_set,series,currency/MWh,0.,Shadow price of fixed power transmission p_set,Output diff --git a/pypsa/component_attrs/storage_units.csv b/pypsa/component_attrs/storage_units.csv index 64962f0ff..de7bdd177 100644 --- a/pypsa/component_attrs/storage_units.csv +++ b/pypsa/component_attrs/storage_units.csv @@ -24,7 +24,12 @@ efficiency_dispatch,float,per unit,1.,Efficiency of storage on the way out of th standing_loss,float,per unit,0.,Losses per hour to state of charge.,Input (optional) inflow,static or series,MW,0.,"Inflow to the state of charge, e.g. 
due to river inflow in hydro reservoir.",Input (optional) p,series,MW,0.,active power at bus (positive if net generation),Output +p_dispatch,series,MW,0.,active power dispatch at bus,Output +p_store,series,MW,0.,active power charging at bus,Output q,series,MVar,0.,reactive power (positive if net generation),Output state_of_charge,series,MWh,NaN,State of charge as calculated by the OPF.,Output spill,series,MW,0.,Spillage for each snapshot.,Output p_nom_opt,float,MW,0.,Optimised nominal power.,Output +mu_upper,series,currency/MWh,0.,Shadow price of upper p_nom limit,Output +mu_lower,series,currency/MWh,0.,Shadow price of lower p_nom limit,Output +mu_state_of_charge_set,series,currency/MWh,0.,Shadow price of fixed state of charge state_of_charge_set,Output diff --git a/pypsa/component_attrs/stores.csv b/pypsa/component_attrs/stores.csv index 5d5e032a8..48223176d 100644 --- a/pypsa/component_attrs/stores.csv +++ b/pypsa/component_attrs/stores.csv @@ -19,4 +19,7 @@ standing_loss,float,per unit,0.,Losses per hour to energy.,Input (optional) p,series,MW,0.,active power at bus (positive if net generation),Output q,series,MVar,0.,reactive power (positive if net generation),Output e,series,MWh,0.,Energy as calculated by the OPF.,Output -e_nom_opt,float,MW,0.,Optimised nominal energy capacity outputed by OPF.,Output \ No newline at end of file +e_nom_opt,float,MW,0.,Optimised nominal energy capacity outputed by OPF.,Output +mu_upper,series,currency/MWh,0.,Shadow price of upper e_nom limit,Output +mu_lower,series,currency/MWh,0.,Shadow price of lower e_nom limit,Output +mu_e_set,series,currency/MWh,0.,Shadow price of fixed energy level e_set,Output diff --git a/pypsa/component_attrs/sub_networks.csv b/pypsa/component_attrs/sub_networks.csv index fec1459b0..acee5bcf6 100644 --- a/pypsa/component_attrs/sub_networks.csv +++ b/pypsa/component_attrs/sub_networks.csv @@ -2,3 +2,4 @@ attribute,type,unit,default,description,status name,string,n/a,n/a,Unique name based on order of sub-network in list of sub-networks.,Output carrier,string,n/a,AC,"Energy carrier: could be for example ""AC"" or ""DC"" (for electrical networks) or ""gas"" or ""heat"". 
The carrier is determined from the buses in sub_network.",Output slack_bus,string,n/a,n/a,Name of slack bus.,Output +mu_kirchhoff_voltage_law,series,currency/MWh,n/a,Shadow price of KVL constraint per cycle,Output \ No newline at end of file diff --git a/pypsa/io.py b/pypsa/io.py index ed72ebbeb..14b7e67e2 100644 --- a/pypsa/io.py +++ b/pypsa/io.py @@ -19,7 +19,7 @@ # make the code as Python 3 compatible as possible from __future__ import division, absolute_import from six import iteritems, iterkeys, string_types -from six.moves import filter, range +from six.moves import range __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS)" __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), GNU GPL 3" @@ -32,7 +32,6 @@ from glob import glob import pandas as pd -import pypsa import numpy as np import math @@ -315,7 +314,7 @@ def _export_to_exporter(network, exporter, basename, export_standard_types=False exporter.save_snapshots(snapshots) exported_components = [] - for component in network.all_components - {"SubNetwork"}: + for component in network.all_components: list_name = network.components[component]["list_name"] attrs = network.components[component]["attrs"] @@ -335,7 +334,7 @@ def _export_to_exporter(network, exporter, basename, export_standard_types=False col_export = [] for col in df.columns: # do not export derived attributes - if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu"]: + if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu", "obj"]: continue if col in attrs.index and pd.isnull(attrs.at[col, "default"]) and pd.isnull(df[col]).all(): continue @@ -592,7 +591,7 @@ def _import_from_importer(network, importer, basename, skip_time=False): imported_components = [] # now read in other components; make sure buses and carriers come first - for component in ["Bus", "Carrier"] + sorted(network.all_components - {"Bus", "Carrier", "SubNetwork"}): + for component in ["Bus", "Carrier"] + sorted(network.all_components - {"Bus", "Carrier"}): list_name = network.components[component]["list_name"] df = importer.get_static(list_name) @@ -732,7 +731,7 @@ def import_series_from_dataframe(network, dataframe, cls_name, attr): >>> import numpy as np >>> network.set_snapshots(range(10)) >>> network.import_series_from_dataframe( - pd.DataFrame(np.random.rand(10,4), + pd.DataFrame(np.random.rand(10,4), columns=network.generators.index, index=range(10)), "Generator", @@ -934,7 +933,7 @@ def import_from_pandapower_net(network, net, extra_line_data=False): Importing from pandapower is still in beta; not all pandapower data is supported. 
- + Unsupported features include: - three-winding transformers - switches diff --git a/pypsa/linopf.py b/pypsa/linopf.py index b122b8220..645c1f52b 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -255,16 +255,19 @@ def cycle_flow(ds): vals = linexpr((ds, get_var(n, 'Line', 's')[ds.index])) + '\n' return vals.sum(1) + sns = get_var(n, 'Line', 's').index constraints = [] for sub in n.sub_networks.obj: C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) if C.empty: continue C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) - con = write_constraint(n, C_weighted.apply(cycle_flow), '=', 0) + cycle_sum = C_weighted.apply(cycle_flow) + cycle_sum.index = sns + con = write_constraint(n, cycle_sum, '=', 0) constraints.append(con) constraints = pd.concat(constraints, axis=1, ignore_index=True) - set_conref(n, constraints, 'Line', 'kirchhoff_voltage') + set_conref(n, constraints, 'SubNetwork', 'mu_kirchhoff_voltage_law') def define_storage_unit_constraints(n, sns): @@ -311,7 +314,7 @@ def masked_term(coeff, var, cols): rhs.loc[sns[0], noncyclic_i] -= n.df(c).state_of_charge_initial[noncyclic_i] constraints = write_constraint(n, lhs, '==', rhs) - set_conref(n, constraints, c, 'soc') + set_conref(n, constraints, c, 'mu_state_of_charge') def define_store_constraints(n, sns): @@ -350,7 +353,7 @@ def masked_term(coeff, var, cols): rhs.loc[sns[0], noncyclic_i] -= n.df(c)['e_initial'][noncyclic_i] constraints = write_constraint(n, lhs, '==', rhs) - set_conref(n, constraints, c, 'soc') + set_conref(n, constraints, c, 'mu_state_of_charge') def define_global_constraints(n, sns): @@ -573,12 +576,14 @@ def map_solution(c, attr, pnl): def map_dual(c, attr, pnl): if pnl: n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() - .map(-constraints_dual).unstack()) + .map(-constraints_dual).unstack()) else: n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(-constraints_dual) for (c, attr), pnl in n.constraints.pnl.items(): map_dual(c, attr, pnl) + if attr == 'mu_state_of_charge': + n.pnl(c).pop(attr) #load n.loads_t.p = n.loads_t.p_set @@ -603,8 +608,7 @@ def v_ang_for_(sub): n.buses_t.v_ang = (pd.concat( [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) .reindex(columns=n.buses.index, fill_value=0)) - - + n.buses_t['marginal_price'] = n.buses_t.pop('nodal_balance') def network_lopf(n, snapshots=None, solver_name="cbc", From d2c9c7e3b5ef588d0eb338be4585c945043db6a6 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 25 Oct 2019 12:37:27 +0200 Subject: [PATCH 023/111] io: revert exporting subnetwork frame and time-series (shadow price of KVL) --- pypsa/component_attrs/sub_networks.csv | 1 - pypsa/io.py | 13 +++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pypsa/component_attrs/sub_networks.csv b/pypsa/component_attrs/sub_networks.csv index acee5bcf6..fec1459b0 100644 --- a/pypsa/component_attrs/sub_networks.csv +++ b/pypsa/component_attrs/sub_networks.csv @@ -2,4 +2,3 @@ attribute,type,unit,default,description,status name,string,n/a,n/a,Unique name based on order of sub-network in list of sub-networks.,Output carrier,string,n/a,AC,"Energy carrier: could be for example ""AC"" or ""DC"" (for electrical networks) or ""gas"" or ""heat"". 
The carrier is determined from the buses in sub_network.",Output slack_bus,string,n/a,n/a,Name of slack bus.,Output -mu_kirchhoff_voltage_law,series,currency/MWh,n/a,Shadow price of KVL constraint per cycle,Output \ No newline at end of file diff --git a/pypsa/io.py b/pypsa/io.py index 14b7e67e2..ed72ebbeb 100644 --- a/pypsa/io.py +++ b/pypsa/io.py @@ -19,7 +19,7 @@ # make the code as Python 3 compatible as possible from __future__ import division, absolute_import from six import iteritems, iterkeys, string_types -from six.moves import range +from six.moves import filter, range __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS)" __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), GNU GPL 3" @@ -32,6 +32,7 @@ from glob import glob import pandas as pd +import pypsa import numpy as np import math @@ -314,7 +315,7 @@ def _export_to_exporter(network, exporter, basename, export_standard_types=False exporter.save_snapshots(snapshots) exported_components = [] - for component in network.all_components: + for component in network.all_components - {"SubNetwork"}: list_name = network.components[component]["list_name"] attrs = network.components[component]["attrs"] @@ -334,7 +335,7 @@ def _export_to_exporter(network, exporter, basename, export_standard_types=False col_export = [] for col in df.columns: # do not export derived attributes - if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu", "obj"]: + if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu"]: continue if col in attrs.index and pd.isnull(attrs.at[col, "default"]) and pd.isnull(df[col]).all(): continue @@ -591,7 +592,7 @@ def _import_from_importer(network, importer, basename, skip_time=False): imported_components = [] # now read in other components; make sure buses and carriers come first - for component in ["Bus", "Carrier"] + sorted(network.all_components - {"Bus", "Carrier"}): + for component in ["Bus", "Carrier"] + sorted(network.all_components - {"Bus", "Carrier", "SubNetwork"}): list_name = network.components[component]["list_name"] df = importer.get_static(list_name) @@ -731,7 +732,7 @@ def import_series_from_dataframe(network, dataframe, cls_name, attr): >>> import numpy as np >>> network.set_snapshots(range(10)) >>> network.import_series_from_dataframe( - pd.DataFrame(np.random.rand(10,4), + pd.DataFrame(np.random.rand(10,4), columns=network.generators.index, index=range(10)), "Generator", @@ -933,7 +934,7 @@ def import_from_pandapower_net(network, net, extra_line_data=False): Importing from pandapower is still in beta; not all pandapower data is supported. - + Unsupported features include: - three-winding transformers - switches From 8ebe4cb83ecbe3e8364b5089708e7d55d4752872 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 25 Oct 2019 14:10:58 +0200 Subject: [PATCH 024/111] linopf: add argument keep_shadowprices, to simplify dual extracting --- pypsa/components.py | 24 ++++++++++---- pypsa/linopf.py | 79 +++++++++++++++++++++++++++++++-------------- 2 files changed, 72 insertions(+), 31 deletions(-) diff --git a/pypsa/components.py b/pypsa/components.py index 00d031673..f871ba5af 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -437,12 +437,6 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, the model building is complete, but before it is sent to the solver. It allows the user to add/change constraints and add/change the objective function. 
-        extra_postprocessing : callable function
-            This function must take three arguments
-            `extra_postprocessing(network,snapshots,duals)` and is called after
-            the model has solved and the results are extracted. It allows the user to
-            extract further information about the solution, such as additional
-            shadow prices.
-

        These arguments can be used if pyomo is set to False:
        -----------------------------------------------------
        warmstart : bool or string, default False
            Use this to warmstart the optimization. Pass a string which gives
            the path to the basis file. If set to True, a path to
            a basis file must be given in network.basis_fn.
        store_basis : bool, default True
            Whether to store the basis of the optimization results. If True,
            the path to the basis file is saved in network.basis_fn. Note that
            a basis can only be stored if simplex, dual-simplex, or barrier
            *with* crossover is used for solving.
+        keep_references : bool, default False
+            Keep the references of variable and constraint names within the
+            network, e.g. n.generators_t.p_varref - useful for constructing
+            extra_functionality or debugging
+        keep_shadowprices : bool or list of component names, default None
+            Keep shadow prices for all constraints, if set to True.
+            These are stored at e.g. n.generators_t.mu_upper for upper limit
+            of p_nom. If a list of component names is passed, shadow
+            prices of variables attached to those are extracted. If set to None,
+            components default to ['Bus', 'Line', 'GlobalConstraint']

        These arguments can be used if pyomo is set to True:
        ----------------------------------------------------
        ptdf_tolerance : float
            Value below which PTDF entries are ignored
        free_memory : set, default {'pyomo'}
            Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series
            data away while the solver runs (as a pickle to disk) and/or free
            `pyomo` data after the solution has been extracted.
        solver_io : string, default None
            Solver Input-Output option, e.g. "python" to use "gurobipy" for
            solver_name="gurobi"
        skip_pre : bool, default False
            Skip the preliminary steps of computing topology, calculating
            dependent values and finding bus controls.
+        extra_postprocessing : callable function
+            This function must take three arguments
+            `extra_postprocessing(network,snapshots,duals)` and is called after
+            the model has solved and the results are extracted. It allows the user
+            to extract further information about the solution, such as additional
+            shadow prices.
+

        Returns
        -------
@@ -479,7 +490,6 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None,
        args = {'snapshots': snapshots, 'keep_files': keep_files,
                'solver_options': solver_options, 'formulation': formulation,
                'extra_functionality': extra_functionality,
-                'extra_postprocessing': extra_postprocessing,
                'solver_name': solver_name, 'solver_logfile': solver_logfile}
        args.update(kwargs)
        if pyomo:
diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index 645c1f52b..74b3da1ff 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -241,7 +241,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1):
                  .groupby(n.loads.bus, axis=1).sum()
                  .reindex(columns=n.buses.index, fill_value=0))
     constraints = write_constraint(n, lhs, sense, rhs)
-    set_conref(n, constraints, 'Bus', 'nodal_balance')
+    set_conref(n, constraints, 'Bus', 'marginal_price')


 def define_kirchhoff_constraints(n):
@@ -539,7 +539,7 @@ def time_info(message):

 def assign_solution(n, sns, variables_sol, constraints_dual,
-                    extra_postprocessing, keep_references=False):
+                    keep_references=False, keep_shadowprices=None):
     """
     Helper function. Assigns the solution of a successful optimization to the
     network.
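A minimal usage sketch of the two new arguments (assuming a small, already
populated network ``n``; apart from the attribute names documented above,
the file name is hypothetical):

    import pypsa

    n = pypsa.Network('my_network.nc')   # hypothetical example network
    n.lopf(pyomo=False,
           keep_shadowprices=['Bus', 'Line', 'GlobalConstraint'])

    n.buses_t.marginal_price   # nodal prices, from the Bus constraint duals
    n.lines_t.mu_upper         # shadow prices of the upper line capacity limits
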
@@ -574,21 +574,31 @@ def map_solution(c, attr, pnl): #duals def map_dual(c, attr, pnl): + sign = 1 if 'upper' in attr else -1 if pnl: n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() - .map(-constraints_dual).unstack()) + .map(sign * constraints_dual).unstack()) else: - n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(-constraints_dual) + n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(sign* constraints_dual) + + + if keep_shadowprices == False: + keep_shadowprices = [] + elif keep_shadowprices is None: + keep_shadowprices = ['Bus', 'Line', 'GlobalConstraint'] for (c, attr), pnl in n.constraints.pnl.items(): - map_dual(c, attr, pnl) - if attr == 'mu_state_of_charge': - n.pnl(c).pop(attr) + if keep_shadowprices == True: + map_dual(c, attr, pnl) + elif c in keep_shadowprices: + map_dual(c, attr, pnl) + else: + get_con(n, c, attr, pop=True) #load n.loads_t.p = n.loads_t.p_set - #injection, why does it 'exclude' injection in hvdc 'network'? + # recalculate injection ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')] @@ -608,13 +618,13 @@ def v_ang_for_(sub): n.buses_t.v_ang = (pd.concat( [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) .reindex(columns=n.buses.index, fill_value=0)) - n.buses_t['marginal_price'] = n.buses_t.pop('nodal_balance') def network_lopf(n, snapshots=None, solver_name="cbc", solver_logfile=None, extra_functionality=None, extra_postprocessing=None, formulation="kirchhoff", - keep_references=False, keep_files=False, solver_options={}, + keep_references=False, keep_files=False, + keep_shadowprices=None, solver_options={}, warmstart=False, store_basis=True): """ Linear optimal power flow for a group of snapshots. @@ -627,15 +637,9 @@ def network_lopf(n, snapshots=None, solver_name="cbc", solver_name : string Must be a solver name that pyomo recognises and that is installed, e.g. "glpk", "gurobi" - skip_pre : bool, default False - Skip the preliminary steps of computing topology, calculating - dependent values and finding bus controls. - extra_functionality : callable function - This function must take two arguments - `extra_functionality(network,snapshots)` and is called after - the model building is complete, but before it is sent to the - solver. It allows the user to - add/change constraints and add/change the objective function. + pyomo : bool, default True + Whether to use pyomo for building and solving the model, setting + this to False saves a lot of memory and time. solver_logfile : None|string If not None, sets the logfile option of the solver. solver_options : dictionary @@ -645,14 +649,40 @@ def network_lopf(n, snapshots=None, solver_name="cbc", Keep the files that pyomo constructs from OPF problem construction, e.g. .lp file - useful for debugging formulation : string - Formulation of the linear power flow equations to use; only "kirchhoff" - is currently supported + Formulation of the linear power flow equations to use; must be + one of ["angles","cycles","kirchhoff","ptdf"] + extra_functionality : callable function + This function must take two arguments + `extra_functionality(network,snapshots)` and is called after + the model building is complete, but before it is sent to the + solver. It allows the user to + add/change constraints and add/change the objective function. 
    extra_postprocessing : callable function
        This function must take three arguments
        `extra_postprocessing(network,snapshots,duals)` and is called after
-        the model has solved and the results are extracted. It allows the user to
-        extract further information about the solution, such as additional
+        the model has solved and the results are extracted. It allows the user
+        to extract further information about the solution, such as additional
        shadow prices.
+    warmstart : bool or string, default False
+        Use this to warmstart the optimization. Pass a string which gives
+        the path to the basis file. If set to True, a path to
+        a basis file must be given in network.basis_fn.
+    store_basis : bool, default True
+        Whether to store the basis of the optimization results. If True,
+        the path to the basis file is saved in network.basis_fn. Note that
+        a basis can only be stored if simplex, dual-simplex, or barrier
+        *with* crossover is used for solving.
+    keep_references : bool, default False
+        Keep the references of variable and constraint names within the
+        network, e.g. n.generators_t.p_varref - useful for constructing
+        extra_functionality or debugging
+    keep_shadowprices : bool or list of component names, default None
+        Keep shadow prices for all constraints, if set to True.
+        These are stored at e.g. n.generators_t.mu_upper for upper limit
+        of p_nom. If a list of component names is passed, shadow
+        prices of variables attached to those are extracted. If set to None,
+        components default to ['Bus', 'Line', 'GlobalConstraint']
+

    Returns
    -------
@@ -700,7 +730,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
    n.objective = obj
    gc.collect()
    assign_solution(n, snapshots, variables_sol, constraints_dual,
-                    extra_postprocessing, keep_references=keep_references)
+                    keep_references=keep_references,
+                    keep_shadowprices=keep_shadowprices)
    gc.collect()

    return status,termination_condition

From b85f68563dc85866e7f0916d53fa9c816a6afa02 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 11:45:02 +0100
Subject: [PATCH 025/111] update docs

---
 pypsa/components.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/pypsa/components.py b/pypsa/components.py
index f871ba5af..af71ec5c2 100644
--- a/pypsa/components.py
+++ b/pypsa/components.py
@@ -438,8 +438,13 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None,
            solver. It allows the user to
            add/change constraints and add/change the objective function.

-        These arguments can be used if pyomo is set to False:
-        -----------------------------------------------------
+        Returns
+        -------
+        None
+
+        Other Parameters
+        ----------------
+
        warmstart : bool or string, default False
            Use this to warmstart the optimization. Pass a string which gives
            the path to the basis file. If set to True, a path to
            a basis file must be given in network.basis_fn.
        store_basis : bool, default True
            Whether to store the basis of the optimization results. If True,
            the path to the basis file is saved in network.basis_fn. Note that
            a basis can only be stored if simplex, dual-simplex, or barrier
            *with* crossover is used for solving.
        keep_references : bool, default False
            Keep the references of variable and constraint names within the
            network, e.g. n.generators_t.p_varref - useful for constructing
            extra_functionality or debugging
        keep_shadowprices : bool or list of component names, default None
            Keep shadow prices for all constraints, if set to True.
            These are stored at e.g. n.generators_t.mu_upper for upper limit
            of p_nom. If a list of component names is passed, shadow
            prices of variables attached to those are extracted. If set to None,
            components default to ['Bus', 'Line', 'GlobalConstraint']
-
-        These arguments can be used if pyomo is set to True:
-        ----------------------------------------------------
        ptdf_tolerance : float
            Value below which PTDF entries are ignored
        free_memory : set, default {'pyomo'}
            Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series
            data away while the solver runs (as a pickle to disk) and/or free
            `pyomo` data after the solution has been extracted.
        solver_io : string, default None
            Solver Input-Output option, e.g. "python" to use "gurobipy" for
            solver_name="gurobi"
        skip_pre : bool, default False
            Skip the preliminary steps of computing topology, calculating
            dependent values and finding bus controls.
+        extra_postprocessing : callable function
+            This function must take three arguments
+            `extra_postprocessing(network,snapshots,duals)` and is called after
+            the model has solved and the results are extracted. It allows the user
+            to extract further information about the solution, such as additional
+            shadow prices.
-        Returns
-        -------
-        None
-
        """
        args = {'snapshots': snapshots, 'keep_files': keep_files,
                'solver_options': solver_options, 'formulation': formulation,
                'extra_functionality': extra_functionality,

From 39cfda411cde9d59810d478bee934aa2e271a491 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 16:27:37 +0100
Subject: [PATCH 026/111] update docs II

linopt: add argument as_pandas to linexpr
---
 doc/optimal_power_flow.rst | 236 +++++++++++++++++++------------------
 pypsa/components.py        |  50 ++++----
 pypsa/linopf.py            |  48 +++++---
 pypsa/linopt.py            |  38 ++++--
 4 files changed, 204 insertions(+), 168 deletions(-)

diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst
index ebbe5e51c..ee951416f 100644
--- a/doc/optimal_power_flow.rst
+++ b/doc/optimal_power_flow.rst
@@ -1,30 +1,34 @@
-######################
- Optimal Power Flow
-######################
+###########################
+ Linear Optimal Power Flow
+###########################

-See the module ``pypsa.opf``.
+See the modules ``pypsa.opf`` and ``pypsa.linopf``. Optimisation with the linearised power flow equations for (mixed) AC
+and DC networks is fully supported.

+All constraints and variables are listed below.

-Non-Linear Optimal Power Flow
-==============================

-Optimisation with the full non-linear power flow equations is not yet
-supported.

-Linear Optimal Power Flow
-=========================

-Optimisation with the linearised power flow equations for (mixed) AC
-and DC networks is fully supported.
+Overview
+--------

-All constraints and variables are listed below.
+* The linear OPF module can optimise the dispatch of generation and storage and the capacities of generation, storage and transmission infrastructure.

+* It is assumed that the load is inelastic and must be met in every snapshot (this will be relaxed in future versions).

+* The optimisation currently uses continuous variables for most functionality; unit commitment with binary variables is also implemented for generators.

-Overview
---------
+* The objective function is the total system cost for the snapshots optimised.
+
+* Each snapshot can be given a weighting :math:`w_t` to represent e.g. multiple hours.
+
+* This set-up can also be used for stochastic optimisation, if you interpret the weighting as a probability.
+
+* Each transmission asset has a capital cost.
+
+* Each generation and storage asset has a capital cost and a marginal cost.


 Execute:

@@ -48,31 +52,12 @@ for more details).

 .. automethod:: pypsa.Network.lopf

-The linear OPF module can optimise the dispatch of generation and storage
-and the capacities of generation, storage and transmission infrastructure.
-
-It is assumed that the load is inelastic and must be met in every
-snapshot (this will be relaxed in future versions).
-
-The optimisation currently uses continuous variables for most
-functionality; unit commitment with binary variables is also
-implemented for generators.
-
-The objective function is the total system cost for the snapshots
-optimised.
-
-Each snapshot can be given a weighting :math:`w_t` to represent
-e.g. multiple hours.

-This set-up can also be used for stochastic optimisation, if you
-interpret the weighting as a probability.

-Each transmission asset has a capital cost.
+.. important:: Since pypsa v0.15, the package enables the optimisation without the use of `pyomo `_. This makes the lopf function much more efficient in terms of memory usage and time.
For this purpose two new modules were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` which mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo.
+   Note that when setting pyomo to False, the ``extra_functionality`` has to be adapted to the appropriate syntax.

-Each generation and storage asset has a capital cost and a marginal cost.
-
-
-.. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the LOPF, so the network equations may no longer be valid. Note also that all the expansion is continuous.
+.. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however pypsa.linopf.ilopf covers this through an iterative process as done `here `_.


 Optimising dispatch only: a market model
-----------------------------------------

 Capacity optimisation can be turned off so that only the dispatch is
 optimised, like a short-run electricity market model.

@@ -89,7 +74,7 @@
 point-to-point HVDC link).


 Optimising total annual system costs
--------------------------------------
+----------------------------------------

 To minimise long-run annual system costs for meeting an inelastic electrical
 load, capital costs for transmission and generation should be set to

@@ -117,41 +102,28 @@
 functionality is planned.


 Variables and notation summary
------------------------------

-:math:`n \in N = \{0,\dots |N|-1\}` label the buses
-
-:math:`t \in T = \{0,\dots |T|-1\}` label the snapshots
-
-:math:`l \in L = \{0,\dots |L|-1\}` label the branches
-
-:math:`s \in S = \{0,\dots |S|-1\}` label the different generator/storage types at each bus
-
-:math:`w_t` weighting of time :math:`t` in the objective function
-
-:math:`g_{n,s,t}` dispatch of generator :math:`s` at bus :math:`n` at time :math:`t`
-
-:math:`\bar{g}_{n,s}` nominal power of generator :math:`s` at bus :math:`n`
-
-:math:`\bar{g}_{n,s,t}` availability of generator :math:`s` at bus :math:`n` at time :math:`t` per unit of nominal power
-
-:math:`u_{n,s,t}` binary status variable for generator with unit commitment
-
-:math:`suc_{n,s,t}` start-up cost if generator with unit commitment is started at time :math:`t`
-
-:math:`sdc_{n,s,t}` shut-down cost if generator with unit commitment is shut down at time :math:`t`
-
-:math:`c_{n,s}` capital cost of extending generator nominal power by one MW
-
-:math:`o_{n,s}` marginal cost of dispatch generator for one MWh
-
-:math:`f_{l,t}` flow of power in branch :math:`l` at time :math:`t`
-
-:math:`F_{l}` capacity of branch :math:`l`
-
-:math:`\eta_{n,s}` efficiency of generator :math:`s` at bus :math:`n`
-
-:math:`\eta_{l}` efficiency of controllable link :math:`l`
-
-:math:`e_s` CO2-equivalent-tonne-per-MWh of the fuel carrier :math:`s`
+..
csv-table::
+   :widths: 20 50
+   :delim: ;
+
+   :math:`n \in N = \{0,\dots |N|-1\}`; label the buses
+   :math:`t \in T = \{0,\dots |T|-1\}`; label the snapshots
+   :math:`l \in L = \{0,\dots |L|-1\}`; label the branches
+   :math:`s \in S = \{0,\dots |S|-1\}`; label the different generator/storage types at each bus
+   :math:`w_t`; weighting of time :math:`t` in the objective function
+   :math:`g_{n,s,t}`; dispatch of generator :math:`s` at bus :math:`n` at time :math:`t`
+   :math:`\bar{g}_{n,s}`; nominal power of generator :math:`s` at bus :math:`n`
+   :math:`\bar{g}_{n,s,t}`; availability of generator :math:`s` at bus :math:`n` at time :math:`t` per unit of nominal power
+   :math:`u_{n,s,t}`; binary status variable for generator with unit commitment
+   :math:`suc_{n,s,t}`; start-up cost if generator with unit commitment is started at time :math:`t`
+   :math:`sdc_{n,s,t}`; shut-down cost if generator with unit commitment is shut down at time :math:`t`
+   :math:`c_{n,s}`; capital cost of extending generator nominal power by one MW
+   :math:`o_{n,s}`; marginal cost of dispatch generator for one MWh
+   :math:`f_{l,t}`; flow of power in branch :math:`l` at time :math:`t`
+   :math:`F_{l}`; capacity of branch :math:`l`
+   :math:`\eta_{n,s}`; efficiency of generator :math:`s` at bus :math:`n`
+   :math:`\eta_{l}`; efficiency of controllable link :math:`l`
+   :math:`e_s`; CO2-equivalent-tonne-per-MWh of the fuel carrier :math:`s`

 Further definitions are given below.

@@ -213,7 +185,7 @@
 availability is a constant.

 If the generator's nominal power :math:`\bar{g}_{n,s}` is also the
 subject of optimisation (``generator.p_nom_extendable == True``) then
 limits ``generator.p_nom_min`` and ``generator.p_nom_max`` on the
 installable nominal power may also be introduced, e.g.

@@ -231,6 +203,7 @@

 Generator unit commitment constraints
 -------------------------------------
+
 These are defined in ``pypsa.opf.define_generator_variables_constraints(network,snapshots)``.

 The implementation follows Chapter 4.3 of `Convex Optimization of Power Systems `_ by
 Joshua Adam Taylor (CUP, 2015).

@@ -276,6 +249,7 @@
 so that it is only non-zero if :math:`u_{n,s,t} - u_{n,s,t-1} = 1`, i.e. the gen

 Generator ramping constraints
 -----------------------------
+
 These are defined in ``pypsa.opf.define_generator_variables_constraints(network,snapshots)``.

 The implementation follows Chapter 4.3 of `Convex Optimization of Power Systems `_ by

@@ -304,7 +278,7 @@ at start-up :math:`rusu_{n,s}` and shut-down :math:`rdsd_{n,s}`
   \end{gather*}

 Storage Unit constraints
------------------------
+-------------------------

 These are defined in
 ``pypsa.opf.define_storage_variables_constraints(network,snapshots)``.

@@ -363,7 +337,7 @@ storage unit where the state of charge must empty every day.)

 Store constraints
-------------------------
+------------------

 These are defined in
 ``pypsa.opf.define_store_variables_constraints(network,snapshots)``.

@@ -403,7 +377,8 @@ optimisation assumes :math:`e_{n,s,t=-1} = e_{n,s,t=|T|-1}`.

 Passive branch flows: lines and transformers
---------------------------------------------
+---------------------------------------------
+
 See ``pypsa.opf.define_passive_branch_flows(network,snapshots)`` and
 ``pypsa.opf.define_passive_branch_constraints(network,snapshots)`` and
 ``pypsa.opf.define_branch_extension_variables(network,snapshots)``.

@@ -430,20 +405,17 @@ This flow is limited by the capacity :math:`F_l` of the line

 ..
math:: |f_{l,t}| \leq F_l

-Note that if :math:`F_l` is also subject to optimisation
-(``branch.s_nom_extendable == True``), then the impedance :math:`x` of
-the line is NOT automatically changed with the capacity (to represent
-e.g. parallel lines being added).
+.. note::
+    If :math:`F_l` is also subject to optimisation
+    (``branch.s_nom_extendable == True``), then the impedance :math:`x` of
+    the line is NOT automatically changed with the capacity (to represent
+    e.g. parallel lines being added).

-There are two choices here:
+    There are two choices here:

-Iterate the LOPF again with the updated impedances (see e.g. ``_).
+    1. Iterate the LOPF again with the updated impedances, see e.g. ``_, like done by ``pypsa.linopf.ilopf``

-João Gorenstein Dedecca has also implemented a MILP version of the
-transmission expansion, see
-``_, which properly takes
-account of the impedance with a disjunctive relaxation. This will be
-pulled into the main PyPSA code base soon.
+    2. João Gorenstein Dedecca has also implemented a MILP version of the transmission expansion, see ``_, which properly takes account of the impedance with a disjunctive relaxation. This will be pulled into the main PyPSA code base soon.


 .. _formulations:

 Passive branch flow formulations
 --------------------------------
+
+
 PyPSA implements four formulations of the linear power flow equations
 that are mathematically equivalent, but may have different solving
 times. These different formulations are described and

@@ -478,7 +452,9 @@
 generators at most nodes.

 .. _opf-links:

 Controllable branch flows: links
----------------------------------
+--------------------------------
+
+
 See ``pypsa.opf.define_controllable_branch_flows(network,snapshots)``
 and ``pypsa.opf.define_branch_extension_variables(network,snapshots)``.

@@ -506,6 +482,7 @@ efficiencies ``efficiencyi``, i.e. :math:`\eta_{i,l}`, then at

 Nodal power balances
 --------------------
+
 See ``pypsa.opf.define_nodal_balances(network,snapshots)``.

 This is the most important equation, which guarantees that the power

@@ -529,6 +506,7 @@ feeding in and out of it (i.e. like Kirchhoff's Current Law).

 Global constraints
 ------------------
+
 See ``pypsa.opf.define_global_constraints(network,snapshots)``.

 Global constraints apply to more than one component.

@@ -563,16 +541,24 @@
 optimisation stored in ``network.global_constraints.mu``.

 Custom constraints and other functionality
 ------------------------------------------

-PyPSA uses the Python optimisation language `pyomo
-`_ to construct the OPF problem. You can easily
-extend the optimisation problem constructed by PyPSA using the usual
-pyomo syntax. To do this, pass the function ``network.lopf`` a
+
+Since PyPSA v0.15, the lopf is provided by two different modules. The ordinary implementation based on the ``pypsa.opf`` module uses
+`pyomo `_ to set up the linear problem and pass it to the solver. The implementation without pyomo, based on the module ``pypsa.linopf``, uses a straightforward approach to write out the lp file directly and run it explicitly from a solver's interface. Therefore application of custom constraints depends on whether pyomo is activated or not.
+
+In general for a custom constraint, pass the function ``network.lopf`` a
 function ``extra_functionality`` as an argument. This function must
 take two arguments ``extra_functionality(network,snapshots)`` and is
 called after the model building is complete, but before it is sent to
 the solver.
It allows the user to
add, change or remove constraints and alter the objective function.

+1. pyomo is set to True
+=================================
+
You can easily
extend the optimisation problem constructed by PyPSA using the usual
pyomo syntax.
+
The `CHP example `_ and the `example that replaces generators and storage units with fundamental links `_ show how to use ``extra_functionality``.

If you want to extract additional information about the solution, pass ``network.lopf`` a function ``extra_postprocessing``. This function must take three
arguments `extra_postprocessing(network,snapshots,duals)`. It allows the user to extract further information about the solution, such as
additional shadow prices for constraints.

+2. pyomo is set to False
+========================
+
+In general when pyomo is disabled, all variable and constraint references are stored in the network object itself. Thus every variable and constraint is attached to component, e.g. the dispatch variable of network.generators.p is attached to the component 'Generator' and can be easily accessed by
+
+    >>> get_var(n, 'Generator', 'p')
+
+An additional constraint can easily be implemented by using the functions
+
+* ``pypsa.linopt.get_var`` for getting the variables which should be included in the constraint
+* ``pypsa.linopt.linexpr`` for creating linear expressions for the left hand side (lhs) of the constraint. Note that lhs includes all terms which include variables, the rhs is a constant.
+* ``pypsa.linopt.write_constraint`` for writing out the constraint to the lp file
+* ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this is only necessary if a shadow should be extracted after solving
+
+These functions are defined as follows:
+
+.. automethod:: pypsa.linopt.get_var
+.. automethod:: pypsa.linopt.linexpr
+.. automethod:: pypsa.linopt.write_constraint
+.. automethod:: pypsa.linopt.set_conref
+
+The function ``extra_postprocessing`` is not necessary when pyomo is deactivated. For retrieving additional shadow prices, just pass the component name to which the constraint is attached to ``keep_shadowprices``.

Inputs
------
+
For the linear optimal power flow, the following data for each component
are used. For almost all values, defaults are assumed if not explicitly
set. For the defaults and units, see :doc:`components`.
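Putting the four helper functions together, a custom constraint added via
``extra_functionality`` could look roughly like this (a sketch only, assuming
a ready-to-solve network ``n``; the generator names 'gas1' and 'gas2' and the
constraint label are hypothetical):

    from pypsa.linopt import get_var, linexpr, write_constraint, set_conref

    def extra_functionality(n, snapshots):
        # dispatch variables, one column per generator, one row per snapshot
        p = get_var(n, 'Generator', 'p')
        # lhs: joint dispatch of the two (hypothetical) gas generators
        lhs = linexpr((1, p['gas1']), (1, p['gas2']), as_pandas=True)
        # joint dispatch may not exceed 500 MW in any snapshot
        con = write_constraint(n, lhs, '<=', 500)
        # attach the reference so its shadow price can be extracted later
        set_conref(n, con, 'Generator', 'mu_joint_gas_limit')

    n.lopf(pyomo=False, extra_functionality=extra_functionality,
           keep_shadowprices=['Generator'])
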
-network{snapshot_weightings} +* network{snapshot_weightings} -bus.{v_nom, carrier} +* bus.{v_nom, carrier} -load.{p_set} +* load.{p_set} -generator.{p_nom, p_nom_extendable, p_nom_min, p_nom_max, p_min_pu, p_max_pu, marginal_cost, capital_cost, efficiency, carrier} +* generator.{p_nom, p_nom_extendable, p_nom_min, p_nom_max, p_min_pu, p_max_pu, marginal_cost, capital_cost, efficiency, carrier} -storage_unit.{p_nom, p_nom_extendable, p_nom_min, p_nom_max, p_min_pu, p_max_pu, marginal_cost, capital_cost, efficiency*, standing_loss, inflow, state_of_charge_set, max_hours, state_of_charge_initial, cyclic_state_of_charge} +* storage_unit.{p_nom, p_nom_extendable, p_nom_min, p_nom_max, p_min_pu, p_max_pu, marginal_cost, capital_cost, efficiency*, standing_loss, inflow, state_of_charge_set, max_hours, state_of_charge_initial, cyclic_state_of_charge} -store.{e_nom, e_nom_extendable, e_nom_min, e_nom_max, e_min_pu, e_max_pu, e_cyclic, e_initial, capital_cost, marginal_cost, standing_loss} +* store.{e_nom, e_nom_extendable, e_nom_min, e_nom_max, e_min_pu, e_max_pu, e_cyclic, e_initial, capital_cost, marginal_cost, standing_loss} -line.{x, s_nom, s_nom_extendable, s_nom_min, s_nom_max, capital_cost} +* line.{x, s_nom, s_nom_extendable, s_nom_min, s_nom_max, capital_cost} -transformer.{x, s_nom, s_nom_extendable, s_nom_min, s_nom_max, capital_cost} +* transformer.{x, s_nom, s_nom_extendable, s_nom_min, s_nom_max, capital_cost} -link.{p_min_pu, p_max_pu, p_nom, p_nom_extendable, p_nom_min, p_nom_max, capital_cost} +* link.{p_min_pu, p_max_pu, p_nom, p_nom_extendable, p_nom_min, p_nom_max, capital_cost} -carrier.{carrier_attribute} +* carrier.{carrier_attribute} -global_constraint.{type, carrier_attribute, sense, constant} +* global_constraint.{type, carrier_attribute, sense, constant} .. note:: Note that for lines and transformers you MUST make sure that :math:`x` is non-zero, otherwise the bus admittance matrix will be singular. Outputs ------- -bus.{v_mag_pu, v_ang, p, marginal_price} -load.{p} +* bus.{v_mag_pu, v_ang, p, marginal_price} + +* load.{p} -generator.{p, p_nom_opt} +* generator.{p, p_nom_opt} -storage_unit.{p, p_nom_opt, state_of_charge, spill} +* storage_unit.{p, p_nom_opt, state_of_charge, spill} -store.{p, e_nom_opt, e} +* store.{p, e_nom_opt, e} -line.{p0, p1, s_nom_opt, mu_lower, mu_upper} +* line.{p0, p1, s_nom_opt, mu_lower, mu_upper} -transformer.{p0, p1, s_nom_opt, mu_lower, mu_upper} +* transformer.{p0, p1, s_nom_opt, mu_lower, mu_upper} -link.{p0, p1, p_nom_opt, mu_lower, mu_upper} +* link.{p0, p1, p_nom_opt, mu_lower, mu_upper} -global_constraint.{mu} +* global_constraint.{mu} diff --git a/pypsa/components.py b/pypsa/components.py index af71ec5c2..57f82f643 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -438,53 +438,55 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, solver. It allows the user to add/change constraints and add/change the objective function. - Returns - ------- - None - Other Parameters ---------------- + ptdf_tolerance : float + Only when pyomo is True. + Value below which PTDF entries are ignored + free_memory : set, default {'pyomo'} + Only when pyomo is True. + Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series + data away while the solver runs (as a pickle to disk) and/or free + `pyomo` data after the solution has been extracted. + solver_io : string, default None + Only when pyomo is True. + Solver Input-Output option, e.g. 
"python" to use "gurobipy" for + solver_name="gurobi" + skip_pre : bool, default False + Only when pyomo is True. + Skip the preliminary steps of computing topology, calculating + dependent values and finding bus controls. + extra_postprocessing : callable function + This function must take three arguments + `extra_postprocessing(network,snapshots,duals)` and is called after + the model has solved and the results are extracted. It allows the user + to extract further information about the solution, such as additional + shadow prices. warmstart : bool or string, default False + Only when pyomo is False. Use this to warmstart the optimization. Pass a string which gives the path to the basis file. If set to True, a path to a basis file must be given in network.basis_fn. store_basis : bool, default True + Only when pyomo is False. Whether to store the basis of the optimization results. If True, the path to the basis file is saved in network.basis_fn. Note that a basis can only be stored if simplex, dual-simplex, or barrier *with* crossover is used for solving. keep_references : bool, default False + Only when pyomo is False. Keep the references of variable and constraint names withing the network, e.g. n.generators_t.p_varref - useful for constructing extra_functionality or debugging keep_shadowprices : bool or list of component names, default None + Only when pyomo is False. Keep shadow prices for all constraints, if set to True. These are stored at e.g. n.generators_t.mu_upper for upper limit of p_nom. If a list of component names is passed, shadow prices of variables attached to those are extracted. If set to None, components default to ['Bus', 'Line', 'GlobalConstraint'] - ptdf_tolerance : float - Value below which PTDF entries are ignored - free_memory : set, default {'pyomo'} - Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series - data away while the solver runs (as a pickle to disk) and/or free - `pyomo` data after the solution has been extracted. - solver_io : string, default None - Solver Input-Output option, e.g. "python" to use "gurobipy" for - solver_name="gurobi" - skip_pre : bool, default False - Skip the preliminary steps of computing topology, calculating - dependent values and finding bus controls. - extra_postprocessing : callable function - This function must take three arguments - `extra_postprocessing(network,snapshots,duals)` and is called after - the model has solved and the results are extracted. It allows the user - to extract further information about the solution, such as additional - shadow prices. - - """ args = {'snapshots': snapshots, 'keep_files': keep_files, 'solver_options': solver_options, 'formulation': formulation, diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 74b3da1ff..9a28cf43d 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -25,7 +25,8 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, reset_counter, join_exprs, - run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk) + run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, + broadcasted_axes) import pandas as pd @@ -51,6 +52,7 @@ def define_nominal_for_extendable_variables(n, c, attr): network component of which the nominal capacity should be defined attr : str name of the variable, e.g. 'p_nom' + """ ext_i = get_extendable_i(n, c) if ext_i.empty: return @@ -72,6 +74,7 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): name of the network component attr : str name of the attribute, e.g. 
'p'
+
    """
    ext_i = get_extendable_i(n, c)
    if ext_i.empty: return

@@ -91,6 +94,7 @@ def define_dispatch_for_non_extendable_variables(n, sns, c, attr):
        name of the network component
    attr : str
        name of the attribute, e.g. 'p'
+
    """
    fix_i = get_non_extendable_i(n, c)
    if fix_i.empty: return

@@ -114,6 +118,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr):
        name of the network component
    attr : str
        name of the attribute, e.g. 'p'
+
    """
    ext_i = get_extendable_i(n, c)
    if ext_i.empty: return

@@ -148,6 +153,7 @@ def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True):
        name of the attribute, e.g. 'p'
    pnl : bool, default True
        Whether variable which should be fixed is time-dependent
+
    """

    if pnl:

@@ -168,6 +174,7 @@ def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True):

def define_ramp_limit_constraints(n, sns):
    """
    Defines ramp limits for generators with valid ramp limits
+
    """
    c = 'Generator'
    rup_i = n.df(c).query('ramp_limit_up == ramp_limit_up').index

@@ -179,8 +186,7 @@ def define_ramp_limit_constraints(n, sns):
    #fix up
    gens_i = rup_i & get_non_extendable_i(n, c)
-    lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]),
-                                return_axes=True))
+    lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True)
    rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom')
    constraints = write_constraint(n, lhs, '<=', rhs)
    set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='nonextendables')

@@ -189,15 +195,14 @@
    #ext up
    gens_i = rup_i & get_extendable_i(n, c)
    limit_pu = n.df(c)['ramp_limit_up'][gens_i]
    p_nom = get_var(n, c, 'p_nom')[gens_i]
-    lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]),
-                                (-limit_pu, p_nom), return_axes=True))
+    lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom),
+                  as_pandas=True)
    constraints = write_constraint(n, lhs, '<=', 0)
    set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='extendables')

    #fix down
    gens_i = rdown_i & get_non_extendable_i(n, c)
-    lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]),
-                                return_axes=True))
+    lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True)
    rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom')
    constraints = write_constraint(n, lhs, '>=', rhs)
    set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='nonextendables')

@@ -206,8 +211,8 @@
    #ext down
    gens_i = rdown_i & get_extendable_i(n, c)
    limit_pu = n.df(c)['ramp_limit_down'][gens_i]
    p_nom = get_var(n, c, 'p_nom')[gens_i]
-    lhs = pd.DataFrame(*linexpr((1, p[gens_i]), (-1, p_prev[gens_i]),
-                                (limit_pu, p_nom), return_axes=True))
+    lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (limit_pu, p_nom),
+                  as_pandas=True)
    constraints = write_constraint(n, lhs, '>=', 0)
    set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='extendables')

@@ -215,14 +220,15 @@
def define_nodal_balance_constraints(n, sns):
    """
    Defines nodal balance constraint.
+
    """
    def bus_injection(c, attr, groupcol='bus', sign=1):
        #additional sign only necessary for branches in reverse direction
        if 'sign' in n.df(c):
            sign = sign * n.df(c).sign
-        vals = linexpr((sign, get_var(n, c, attr)), return_axes=True)
-        return pd.DataFrame(*vals).rename(columns=n.df(c)[groupcol])
+        return linexpr((sign, get_var(n, c, attr)), as_pandas=True)\
+               .rename(columns=n.df(c)[groupcol])

    # one might reduce this a bit by using n.branches and lookup
    args = [['Generator', 'p'], ['Store', 'p'], ['StorageUnit', 'p_dispatch'],

@@ -247,6 +253,7 @@

def define_kirchhoff_constraints(n):
    """
    Defines Kirchhoff voltage constraints
+
    """
    weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC',
                                        n.lines.r_pu_eff)

@@ -276,6 +283,7 @@ def define_storage_unit_constraints(n, sns):
    the constraint states:

    previous_soc + p_store - p_dispatch + inflow - spill == soc
+
    """
    sus_i = n.storage_units.index
    if sus_i.empty: return

@@ -300,11 +308,12 @@ def define_storage_unit_constraints(n, sns):
    coeff_var = [(-1, soc),
                 (-1/eff_dispatch * eh, get_var(n, c, 'p_dispatch')),
                 (eff_store * eh, get_var(n, c, 'p_store'))]
+
    lhs, *axes = linexpr(*coeff_var, return_axes=True)

    def masked_term(coeff, var, cols):
-        return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\
-                 .reindex(index=axes[0], columns=axes[1], fill_value='').values
+        return linexpr((coeff[cols], var[cols]), as_pandas=True)\
+               .reindex(index=axes[0], columns=axes[1], fill_value='').values

    lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns)
    lhs += masked_term(eff_stand, prev_soc_cyclic, cyclic_i)

@@ -322,6 +331,7 @@ def define_store_constraints(n, sns):
    Defines energy balance constraints for stores. In principle this states:

    previous_e - p == e
+
    """
    stores_i = n.stores.index
    if stores_i.empty: return

@@ -343,8 +353,8 @@ def define_store_constraints(n, sns):
    lhs, *axes = linexpr(*coeff_var, return_axes=True)

    def masked_term(coeff, var, cols):
-        return pd.DataFrame(*linexpr((coeff[cols], var[cols]), return_axes=True))\
-                 .reindex(index=axes[0], columns=axes[1], fill_value='').values
+        return linexpr((coeff[cols], var[cols]), as_pandas=True)\
+               .reindex(index=axes[0], columns=axes[1], fill_value='').values

    lhs += masked_term(eff_stand, previous_e_cyclic, cyclic_i)
    lhs += masked_term(eff_stand.loc[sns[1:]], e.shift().loc[sns[1:]], noncyclic_i)

@@ -369,6 +379,7 @@ def define_global_constraints(n, sns):
    3. transmission_expansion_cost_limit
        Use this to set a limit for line expansion costs. Possible carriers
        are 'AC' and 'DC'
+
    """
    glcs = n.global_constraints.query('type == "primary_energy"')
    for name, glc in glcs.iterrows():

@@ -444,6 +455,7 @@ def define_global_constraints(n, sns):

def define_objective(n):
    """
    Defines and writes out the objective function
+
    """
    for c, attr in lookup.query('marginal_cost').index:
        cost = (get_as_dense(n, c, 'marginal_cost')

@@ -468,6 +480,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False,
    """
    Sets up the linear problem and writes it out to a lp file, stored at
    n.problem_fn
+
    """
    reset_counter()

@@ -543,6 +556,7 @@ def assign_solution(n, sns, variables_sol, constraints_dual,
    """
    Helper function. Assigns the solution of a successful optimization to the
    network.
+
    """
    pop = not keep_references
    #solutions

@@ -683,10 +697,6 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
        prices of variables attached to those are extracted.
If set to None,
        components default to ['Bus', 'Line', 'GlobalConstraint']
-
-    Returns
-    -------
-    None
    """
    supported_solvers = ["cbc", "gurobi", 'glpk', 'scs']
    if solver_name not in supported_solvers:
diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 9f82ac6be..9143b7e0f 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -37,6 +37,7 @@ def write_bound(n, lower, upper, axes=None):
    upper are floats it demands to pass axes, a tuple of (index, columns)
    or (index), for creating the variable of same upper and lower bounds.
    Return a series or frame with variable references.
+
    """
    axes = [axes] if isinstance(axes, pd.Index) else axes
    if axes is None:

@@ -60,6 +61,7 @@ def write_constraint(n, lhs, sense, rhs, axes=None):
    constraints file. If lower and upper are numpy.ndarrays the axes must
    not be None but a tuple of (index, columns) or (index).
    Return a series or frame with constraint references.
+
    """
    axes = [axes] if isinstance(axes, pd.Index) else axes
    if axes is None:

@@ -95,9 +97,14 @@ def broadcasted_axes(*dfs):
    series and frames, respectively, are aligned. Using this function allows
    to subsequently use pure numpy operations and keep the axes in the
    background.
+
    """
    axes = []
    shape = ()
+
+    if set(map(type, dfs)) == {tuple}:
+        dfs = sum(dfs, ())
+
    for df in dfs:
        if isinstance(df, (pd.Series, pd.DataFrame)):
            if len(axes):

@@ -108,7 +115,7 @@
    return axes, shape


-def linexpr(*tuples, return_axes=False):
+def linexpr(*tuples, as_pandas=False, return_axes=False):
    """
    Elementwise concatenation of tuples in the form (coefficient, variables).
    Coefficient and variables can be arrays, series or frames. Returns

@@ -122,6 +129,9 @@
        Each tuple must be of the form (coeff, var), where
        * coeff is a numerical value, or a numerical array, series, frame
        * var is a str or an array, series, frame of variable strings
+    as_pandas : bool, default False
+        Whether to return the resulting array as a series, if 1-dimensional, or
+        a frame, if 2-dimensional. Supersedes return_axes argument.
    return_axes: Boolean, default False
        Whether to return index and column (if existent)

@@ -133,25 +143,27 @@
    >>> var2 = pd.Series(['b1', 'b2', 'b3'])

    >>> linexpr((coeff1, var1), (coeff2, var2))
-    array(['+1.0 a1\n-0.5 b1\n', '+1.0 a2\n-0.3 b2\n', '+1.0 a3\n-1.0 b3\n'],
-          dtype=object)
-
+    array(['+1.0 a1 -0.5 b1', '+1.0 a2 -0.3 b2', '+1.0 a3 -1.0 b3'], dtype=object)

    For turning the result into a series or frame again:
-    >>> pd.Series(*linexpr((coeff1, var1), (coeff2, var2), return_axes=True))
-    0    +1.0 a1\n-0.5 b1\n
-    1    +1.0 a2\n-0.3 b2\n
-    2    +1.0 a3\n-1.0 b3\n
+    >>> linexpr((coeff1, var1), (coeff2, var2), as_pandas=True)
+    0    +1.0 a1 -0.5 b1
+    1    +1.0 a2 -0.3 b2
+    2    +1.0 a3 -1.0 b3
    dtype: object

-    This can also be applied to DataFrames, using
-    pd.DataFrame(*linexpr(..., return_axes=True)).
+    For a further step the resulting frame can be used as the lhs of
+    :func:`pypsa.linopt.write_constraint`
+
    """
-    axes, shape = broadcasted_axes(*sum(tuples, ()))
+    axes, shape = broadcasted_axes(*tuples)
    expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object)
    if np.prod(shape):
        for coeff, var in tuples:
            expr += _str_array(coeff) + _str_array(var) + '\n'
+    if as_pandas:
+        twodims = len(shape) > 1
+        return pd.DataFrame(expr, *axes) if twodims else pd.Series(expr, *axes)
    if return_axes:
        return (expr, *axes)
    return expr

@@ -174,6 +186,7 @@ def _str_array(array):

def join_exprs(df):
    """
    Helper function to join arrays, series or frames of strings together.
+
    """
    return ''.join(np.asarray(df).flatten())


@@ -181,6 +194,7 @@
# =============================================================================
# references to vars and cons, rewrite this part to not store every reference
# =============================================================================
+
def _add_reference(n, df, c, attr, suffix, pnl=True):
    attr_name = attr + suffix
    if pnl:

@@ -242,7 +256,7 @@ def get_var(n, c, attr, pop=False):

    Example
    -------
-    get_var(n, 'Generator', 'p')
+    >>> get_var(n, 'Generator', 'p')

    '''
    if n.variables.at[idx[c, attr], 'pnl']:

From 03193db6e904da493ffd462729cd382b4e74c076 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 16:31:59 +0100
Subject: [PATCH 027/111] update docs II

---
 pypsa/linopt.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 9143b7e0f..14eac012f 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -137,15 +137,20 @@ def linexpr(*tuples, as_pandas=False, return_axes=False):

    Example
    -------
+    Initialize coefficients and variables
+
    >>> coeff1 = 1
    >>> var1 = pd.Series(['a1', 'a2', 'a3'])
    >>> coeff2 = pd.Series([-0.5, -0.3, -1])
    >>> var2 = pd.Series(['b1', 'b2', 'b3'])

+    Create the linear expression strings
+
    >>> linexpr((coeff1, var1), (coeff2, var2))
    array(['+1.0 a1 -0.5 b1', '+1.0 a2 -0.3 b2', '+1.0 a3 -1.0 b3'], dtype=object)

    For turning the result into a series or frame again:
+
    >>> linexpr((coeff1, var1), (coeff2, var2), as_pandas=True)
    0    +1.0 a1 -0.5 b1
    1    +1.0 a2 -0.3 b2
    2    +1.0 a3 -1.0 b3
    dtype: object

From dbcaedbcb8d9d3cbe4e25faad28270c4fdbe1133 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 16:53:14 +0100
Subject: [PATCH 028/111] update docs IV

---
 doc/optimal_power_flow.rst | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst
index ee951416f..1ca062b7c 100644
--- a/doc/optimal_power_flow.rst
+++ b/doc/optimal_power_flow.rst
@@ -54,7 +54,7 @@ for more details).

-.. important:: Since pypsa v0.15, the package enables the optimisation without the use of `pyomo `_. This makes the lopf function much more efficient in terms of memory usage and time. For this purpose two new modules were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` which mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo.
+.. important:: Since version v0.15, PyPSA enables the optimisation without the use of `pyomo `_. This makes the lopf function much more efficient in terms of memory usage and time. For this purpose two new modules were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` which mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo.
   Note that when setting pyomo to False, the ``extra_functionality`` has to be adapted to the appropriate syntax.

 ..
warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however pypsa.linopf.ilopf covers this through an iterative process as done `here `_.

@@ -542,8 +542,8 @@
 Custom constraints and other functionality
 ------------------------------------------

-Since PyPSA v0.15, the lopf is provided by two different modules. The ordinary implementation based on the ``pypsa.opf`` module uses
-`pyomo `_ to set up the linear problem and pass it to the solver. The implementation without pyomo, based on the module ``pypsa.linopf``, uses a straightforward approach to write out the lp file directly and run it explicitly from a solver's interface. Therefore application of custom constraints depends on whether pyomo is activated or not.
+Since PyPSA v0.15, the lopf function is provided by two different modules. The ordinary implementation based on the ``pypsa.opf`` module uses
+`pyomo `_ to set up the linear problem and pass it to the solver. The implementation without pyomo, based on the module ``pypsa.linopf``, uses a straightforward approach to write out the lp file directly and run it explicitly from a solver's interface. Therefore the application of custom constraints depends on whether pyomo is activated or not.

 In general for a custom constraint, pass the function ``network.lopf`` a
 function ``extra_functionality`` as an argument. This function must
 take two arguments ``extra_functionality(network,snapshots)`` and is
 called after the model building is complete, but before it is sent to
 the solver.

@@ -576,16 +576,16 @@ additional shadow prices for constraints.
 2. pyomo is set to False
 ========================

-In general when pyomo is disabled, all variable and constraint references are stored in the network object itself. Thus every variable and constraint is attached to component, e.g. the dispatch variable of network.generators.p is attached to the component 'Generator' and can be easily accessed by
+In general when pyomo is disabled, all variable and constraint references are stored in the network object itself. Thus every variable and constraint is attached to a component, e.g. the dispatch variable of network.generators.p is attached to the component 'Generator' and can be easily accessed by

     >>> get_var(n, 'Generator', 'p')

 An additional constraint can easily be implemented by using the functions

 * ``pypsa.linopt.get_var`` for getting the variables which should be included in the constraint
-* ``pypsa.linopt.linexpr`` for creating linear expressions for the left hand side (lhs) of the constraint. Note that lhs includes all terms which include variables, the rhs is a constant.
+* ``pypsa.linopt.linexpr`` for creating linear expressions for the left hand side (lhs) of the constraint. Note that only the lhs includes all terms with variables, the rhs is a constant.
 * ``pypsa.linopt.write_constraint`` for writing out the constraint to the lp file
-* ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this is only necessary if a shadow should be extracted after solving
+* ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this is only necessary if a shadow price should be extracted after solving

 These functions are defined as follows:

@@ -594,7 +594,7 @@ These functions are defined as follows:
 .. automethod:: pypsa.linopt.write_constraint
 .. automethod:: pypsa.linopt.set_conref

-The function ``extra_postprocessing`` is not necessary when pyomo is deactivated.
For retrieving additional shadow prices, just pass the component name to which the constraint is attached to ``keep_shadowprices``.
+The function ``extra_postprocessing`` is not necessary when pyomo is deactivated. For retrieving additional shadow prices, just pass the component name, to which the constraint is attached, to ``keep_shadowprices``.

 Inputs
 ------

From 0dfa5cea45ee4e78041adbea9d5f9f2f84489ab8 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 16:56:17 +0100
Subject: [PATCH 029/111] update docs V

---
 doc/optimal_power_flow.rst | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst
index 1ca062b7c..d116a5aa0 100644
--- a/doc/optimal_power_flow.rst
+++ b/doc/optimal_power_flow.rst
@@ -1,10 +1,12 @@
-###########################
- Linear Optimal Power Flow
-###########################
+######################
+ Optimal Power Flow
+######################

 See the modules ``pypsa.opf`` and ``pypsa.linopf``. Optimisation with the linearised power flow equations for (mixed) AC
-and DC networks is fully supported.
+and DC networks is fully supported. Note that optimisation with the full non-linear power flow equations is not yet supported.
+
+
 All constraints and variables are listed below.

From 7df314f30f30dd7e1c406473a12ae6415c2a389c Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 16:58:01 +0100
Subject: [PATCH 030/111] update docs VI

---
 doc/optimal_power_flow.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst
index d116a5aa0..f23201f4a 100644
--- a/doc/optimal_power_flow.rst
+++ b/doc/optimal_power_flow.rst
@@ -59,7 +59,7 @@ for more details).

 .. important:: Since version v0.15, PyPSA enables the optimisation without the use of `pyomo `_. This makes the lopf function much more efficient in terms of memory usage and time. For this purpose two new modules were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` which mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo.
   Note that when setting pyomo to False, the ``extra_functionality`` has to be adapted to the appropriate syntax.

-.. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however pypsa.linopf.ilopf covers this through an iterative process as done `here `_.
+.. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed).
This is NOT reflected in the ordinary LOPF, however ``pypsa.linopf.ilopf`` covers this through an iterative process as done `here `_.

From e30f9d3b623dd271357c78412a135b35d3001d50 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 27 Oct 2019 17:28:06 +0100
Subject: [PATCH 031/111] update docs VII

---
 doc/optimal_power_flow.rst | 21 +++++++++++++++++----
 1 file changed, 17 insertions(+), 4 deletions(-)

diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst
index f23201f4a..454182d39 100644
--- a/doc/optimal_power_flow.rst
+++ b/doc/optimal_power_flow.rst
@@ -62,8 +62,8 @@ for more details).

 .. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however ``pypsa.linopf.ilopf`` covers this through an iterative process as done `here `_.

-Optimising dispatch only: a market model
-----------------------------------------
+Optimising dispatch only - a market model
+-----------------------------------------

 Capacity optimisation can be turned off so that only the dispatch is
 optimised, like a short-run electricity market model.

@@ -205,9 +205,10 @@ installable nominal power may also be introduced, e.g.

 Generator unit commitment constraints
 -------------------------------------
-
 These are defined in ``pypsa.opf.define_generator_variables_constraints(network,snapshots)``.

+.. important:: Unit commitment constraints will only be built if pyomo is set to True.
+
 The implementation follows Chapter 4.3 of `Convex Optimization of Power Systems `_ by
 Joshua Adam Taylor (CUP, 2015).

@@ -251,7 +252,6 @@ so that it is only non-zero if :math:`u_{n,s,t} - u_{n,s,t-1} = 1`, i.e. the gen

 Generator ramping constraints
 -----------------------------
-
 These are defined in ``pypsa.opf.define_generator_variables_constraints(network,snapshots)``.

 The implementation follows Chapter 4.3 of `Convex Optimization of Power Systems `_ by

@@ -598,6 +598,19 @@ The function ``extra_postprocessing`` is not necessary when pyomo is deactivated.
 For retrieving additional shadow prices, just pass the component name, to which the constraint is attached, to ``keep_shadowprices``.

+Fixing variables
+----------------
+
+This feature is only valid if pyomo is disabled during the lopf (i.e. ``pyomo=False``). It is possible to fix all variables to specific values. Create a dataframe or a column with the same name as the variable but with suffix '_set'. For all non-NaN values, additional constraints will be built to fix the variables.
+
+For example, let's say we want to fix the output of a single generator 'gas1' to 200 MW for all snapshots. Then we can add a dataframe ``p_set`` to network.generators_t with the corresponding values and index.
+
+    >>> network.generators_t['p_set'] = pd.DataFrame(200, index=network.snapshots, columns=['gas1'])
+
+The lopf will now build extra constraints to fix the ``p`` variables of generator 'gas1' to 200. In the same manner, we can fix the variables only for some specific snapshots. This is applicable to all variables, including ``state_of_charge`` for storage units or ``p`` for links. Static investment variables can be fixed by adding additional columns, e.g. a ``s_nom_set`` column to ``network.lines``.
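+A static capacity can be pinned down the same way; a minimal sketch of the ``_set`` convention just described (the line name 'line1' is hypothetical and ``np`` refers to an imported numpy):
+
+    >>> import numpy as np
+    >>> network.lines['s_nom_set'] = np.nan                 # leave all lines free by default
+    >>> network.lines.loc['line1', 's_nom_set'] = 1500.0    # fix one line's capacity
+    >>> network.lopf(pyomo=False)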
+ + + Inputs ------ From 67c4875ff71ce8f330df8dcd4fdca8787b0f9183 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 28 Oct 2019 16:44:57 +0100 Subject: [PATCH 032/111] linopf: include transformer in KVL, fix assign_solution for rolling horizon optimisation --- pypsa/linopf.py | 101 +++++++++++++++++++++++++++--------------------- pypsa/linopt.py | 13 +++++++ 2 files changed, 70 insertions(+), 44 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 9a28cf43d..0e55f4400 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -26,7 +26,7 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, reset_counter, join_exprs, run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, - broadcasted_axes) + clear_references) import pandas as pd @@ -243,7 +243,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1): .agg(lambda x: ''.join(x.values)) .reindex(columns=n.buses.index)) sense = '=' - rhs = ((- n.loads_t.p_set * n.loads.sign) + rhs = ((- n.loads_t.p_set.loc[sns] * n.loads.sign) .groupby(n.loads.bus, axis=1).sum() .reindex(columns=n.buses.index, fill_value=0)) constraints = write_constraint(n, lhs, sense, rhs) @@ -255,20 +255,24 @@ def define_kirchhoff_constraints(n): Defines Kirchhoff voltage constraints """ - weightings = n.lines.x_pu_eff.where(n.lines.carrier == 'AC', n.lines.r_pu_eff) + comps = n.passive_branch_components & set(n.variables.index.levels[0]) + branch_vars = pd.concat({c:get_var(n, c, 's') for c in comps}, axis=1) def cycle_flow(ds): ds = ds[lambda ds: ds!=0.].dropna() - vals = linexpr((ds, get_var(n, 'Line', 's')[ds.index])) + '\n' + vals = linexpr((ds, branch_vars[ds.index])) + '\n' return vals.sum(1) sns = get_var(n, 'Line', 's').index constraints = [] for sub in n.sub_networks.obj: - C = pd.DataFrame(sub.C.todense(), index=sub.lines_i()) + branches = sub.branches() + C = pd.DataFrame(sub.C.todense(), index=branches.index) if C.empty: continue - C_weighted = 1e5 * C.mul(weightings[sub.lines_i()], axis=0) + carrier = n.sub_networks.carrier[sub.name] + weightings = branches.x_pu_eff if carrier == 'AC' else branches.r_pu_eff + C_weighted = 1e5 * C.mul(weightings, axis=0) cycle_sum = C_weighted.apply(cycle_flow) cycle_sum.index = sns con = write_constraint(n, cycle_sum, '=', 0) @@ -289,11 +293,11 @@ def define_storage_unit_constraints(n, sns): if sus_i.empty: return c = 'StorageUnit' #spillage - upper = get_as_dense(n, c, 'inflow').loc[:, lambda df: df.max() > 0] + upper = get_as_dense(n, c, 'inflow', sns).loc[:, lambda df: df.max() > 0] spill = write_bound(n, 0, upper) set_varref(n, spill, 'StorageUnit', 'spill') - eh = expand_series(n.snapshot_weightings, sus_i) #elapsed hours + eh = expand_series(n.snapshot_weightings[sns], sus_i) #elapsed hours eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) eff_dispatch = expand_series(n.df(c).efficiency_dispatch, sns).T @@ -315,11 +319,12 @@ def masked_term(coeff, var, cols): return linexpr((coeff[cols], var[cols]), as_pandas=True)\ .reindex(index=axes[0], columns=axes[1], fill_value='').values - lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns) + if ('StorageUnit', 'spill') in n.variables.index: + lhs += masked_term(-eh, get_var(n, c, 'spill'), spill.columns) lhs += masked_term(eff_stand, prev_soc_cyclic, cyclic_i) lhs += masked_term(eff_stand.loc[sns[1:]], soc.shift().loc[sns[1:]], noncyclic_i) - rhs = -get_as_dense(n, c, 'inflow').mul(eh) + rhs = -get_as_dense(n, c, 'inflow', sns).mul(eh) rhs.loc[sns[0], noncyclic_i] -= 
n.df(c).state_of_charge_initial[noncyclic_i] constraints = write_constraint(n, lhs, '==', rhs) @@ -339,7 +344,7 @@ def define_store_constraints(n, sns): variables = write_bound(n, -np.inf, np.inf, axes=[sns, stores_i]) set_varref(n, variables, c, 'p') - eh = expand_series(n.snapshot_weightings, stores_i) #elapsed hours + eh = expand_series(n.snapshot_weightings[sns], stores_i) #elapsed hours eff_stand = expand_series(1-n.df(c).standing_loss, sns).T.pow(eh) e = get_var(n, c, 'e') @@ -416,6 +421,9 @@ def define_global_constraints(n, sns): con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + # for the next two to we need a line carrier + if len(n.global_constraints) > len(glcs): + n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) #expansion limits glcs = n.global_constraints.query('type == ' '"transmission_volume_expansion_limit"') @@ -452,17 +460,17 @@ def define_global_constraints(n, sns): set_conref(n, con, 'GlobalConstraint', 'mu', False, name) -def define_objective(n): +def define_objective(n, sns): """ Defines and writes out the objective function """ for c, attr in lookup.query('marginal_cost').index: - cost = (get_as_dense(n, c, 'marginal_cost') + cost = (get_as_dense(n, c, 'marginal_cost', sns) .loc[:, lambda ds: (ds != 0).all()] - .mul(n.snapshot_weightings, axis=0)) + .mul(n.snapshot_weightings[sns], axis=0)) if cost.empty: continue - terms = linexpr((cost, get_var(n, c, attr)[cost.columns])) + terms = linexpr((cost, get_var(n, c, attr).loc[sns, cost.columns])) for t in terms.flatten(): n.objective_f.write(t) #investment @@ -484,9 +492,6 @@ def prepare_lopf(n, snapshots=None, keep_files=False, """ reset_counter() - #used in kirchhoff and globals - n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) - cols = ['component', 'name', 'pnl', 'specification'] n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) n.constraints = pd.DataFrame(columns=cols).set_index(cols[:2]) @@ -527,7 +532,7 @@ def time_info(message): define_kirchhoff_constraints(n) define_nodal_balance_constraints(n, snapshots) define_global_constraints(n, snapshots) - define_objective(n) + define_objective(n, snapshots) if extra_functionality is not None: extra_functionality(n, snapshots) @@ -558,29 +563,36 @@ def assign_solution(n, sns, variables_sol, constraints_dual, network. 
""" + def set_from_frame(c, attr, df): + if n.pnl(c)[attr].empty: + n.pnl(c)[attr] = df.reindex(n.snapshots) + else: + n.pnl(c)[attr].loc[sns, :] = df.reindex(columns=n.pnl(c)[attr].columns) + pop = not keep_references - #solutions - def map_solution(c, attr, pnl): - if pnl: + #solutions, if nominal capcity was no variable set optimal value to nominal + def map_solution(c, attr): + if (c, attr) in n.variables.index: variables = get_var(n, c, attr, pop=pop) - if variables.empty: return - values = variables.stack().map(variables_sol).unstack() - if c in n.passive_branch_components: - n.pnl(c)['p0'] = values - n.pnl(c)['p1'] = - values - elif c == 'Link': - n.pnl(c)['p0'] = values - n.pnl(c)['p1'] = - values * n.df(c).efficiency + pnl = isinstance(variables, pd.DataFrame) + if pnl: + values = variables.stack().map(variables_sol).unstack() + if c in n.passive_branch_components: + set_from_frame(c, 'p0', values) + set_from_frame(c, 'p1', - values) + elif c == 'Link': + set_from_frame(c, 'p0', values) + set_from_frame(c, 'p1', - values * n.df(c).efficiency) + else: + set_from_frame(c, attr, values) else: - n.pnl(c)[attr] = values - elif not get_extendable_i(n, c).empty: - n.df(c)[attr+'_opt'] = get_var(n, c, attr, pop=pop)\ - .map(variables_sol).fillna(n.df(c)[attr]) - else: + n.df(c)[attr+'_opt'] = variables.map(variables_sol)\ + .fillna(n.df(c)[attr]) + elif lookup.at[(c, attr), 'nominal']: n.df(c)[attr+'_opt'] = n.df(c)[attr] - for (c, attr), pnl in n.variables.pnl.items(): - map_solution(c, attr, pnl) + for c, attr in lookup.index: + map_solution(c, attr) if not n.df('StorageUnit').empty: c = 'StorageUnit' @@ -590,12 +602,11 @@ def map_solution(c, attr, pnl): def map_dual(c, attr, pnl): sign = 1 if 'upper' in attr else -1 if pnl: - n.pnl(c)[attr] = (get_con(n, c, attr, pop=pop).stack() - .map(sign * constraints_dual).unstack()) + set_from_frame(c, attr, get_con(n, c, attr, pop=pop).stack() + .map(sign * constraints_dual).unstack()) else: n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(sign* constraints_dual) - if keep_shadowprices == False: keep_shadowprices = [] elif keep_shadowprices is None: @@ -619,7 +630,8 @@ def map_dual(c, attr, pnl): sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1 #sign for 'Link' n.buses_t.p = pd.concat( [n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group]) - for c, attr, group in ca], axis=1).groupby(level=0, axis=1).sum() + for c, attr, group in ca], axis=1).groupby(level=0, axis=1).sum()\ + .reindex(columns=n.buses.index, fill_value=0) def v_ang_for_(sub): buses_i = sub.buses_o @@ -710,14 +722,15 @@ def network_lopf(n, snapshots=None, solver_name="cbc", snapshots = _as_snapshots(n, snapshots) n.calculate_dependent_values() n.determine_network_topology() + clear_references(n) - if solver_logfile is None: - solver_logfile = "test.log" logger.info("Prepare linear problem") prepare_lopf(n, snapshots, keep_files, extra_functionality) gc.collect() - solution_fn = "/tmp/test-{}.sol".format(n.identifier) + solution_fn = f"/tmp/pypsa-solve-{n.identifier}.sol" + if solver_logfile is None: + solver_logfile = "pypsa-solve-{n.identifier}.log" if warmstart == True: warmstart = n.basis_fn diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 14eac012f..2fa5d44bc 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -301,6 +301,18 @@ def get_con(n, c, attr, pop=False): return n.df(c)[attr + con_ref_suffix] +def clear_references(n): + for c in n.iterate_components(): + keys = list(c.pnl.keys()) + for k in keys: + if (con_ref_suffix in k) or - (var_ref_suffix in 
k): + c.pnl.pop(k) + if 'variables' in n.__dir__(): + del n.variables + if 'constraints' in n.__dir__(): + del n.constraints + + # ============================================================================= # solvers # ============================================================================= @@ -373,6 +385,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, For more information on the glpk solver options: https://kam.mff.cuni.cz/~elias/glpk.pdf """ + # TODO use --nopresol argument for non-optimal solution output command = (f"glpsol --lp {problem_fn} --output {solution_fn}") if solver_logfile is not None: command += f' --log {solver_logfile}' From cff7ec81dc44d8f90d0f71e644e5001ce9e99263 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:09:30 +0100 Subject: [PATCH 033/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 454182d39..9a7bb8a18 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -56,7 +56,7 @@ for more details). -.. important:: Since version v0.15, PyPSA enables the optimisation without the use of `pyomo `_. This make the lopf function much more efficient in terms of memory usage and time. For this purpose two new module were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` wich mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo. +.. important:: Since version v0.15, PyPSA enables the optimisation without the use of `pyomo `_. This make the ``lopf`` function much more efficient in terms of memory usage and time. For this purpose two new module were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` wich mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo. Note that when setting pyomo to False, the ``extra_functionality`` has to be adapted to the appropriate syntax. .. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however ``pypsa.linopf.ilopf`` covers this through an iterative process as done `in here `_. From ef29cbf83adac1360e7a253b29ec020b383f5012 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:14:13 +0100 Subject: [PATCH 034/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 9a7bb8a18..84cbeeda0 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -545,7 +545,7 @@ Custom constraints and other functionality Since PyPSA v0.15, the lopf function is provided by two different modules. The ordinary implementation based on the ``pypsa.opf`` module uses -`pyomo `_ to set up the linear problem and passing it to the solver. The implementation without pyomo, based on the module ``pypsa.linopf``, uses a straight-forward approach to write out the lp file directly and explicitly running it from a solver's interface. Therefore the application of custom constraints depend on whether pyomo activated or not. +`pyomo `_ to set up the linear optimisation problem and passing it to the solver. 
The implementation without pyomo, based on the module ``pypsa.linopf``, uses a straight-forward approach to write out the ``.lp`` file directly and explicitly running it from a solver's interface. Therefore the application of custom constraints depend on whether pyomo is activated or not. In general for a custom constraint, pass the function ``network.lopf`` a function ``extra_functionality`` as an argument. This function must From bac787acb2723e98de54195c4e652a855107f439 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:14:31 +0100 Subject: [PATCH 035/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 84cbeeda0..6121f202d 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -586,7 +586,7 @@ An additional constraint can easily be implemented by using the funtions * ``pypsa.linopt.get_var`` for getting the variables which should be included in the constraint * ``pypsa.linopt.linexpr`` for creating linear expressions for the left hand side (lhs) of the constraint. Note that only the lhs includes all terms with variables, the rhs is a constant. -* ``pypsa.linopt.write_constraint`` for writing out the constraint to the lp file +* ``pypsa.linopt.write_constraint`` for writing out the constraint to the ``.lp`` file * ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this only necessary if a shadow price should be extracted after solving The are funcitons defined as such: From db5e6fb4c678b329461c44a1a8b26b7ab4914365 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:14:41 +0100 Subject: [PATCH 036/111] Update pypsa/linopt.py Co-Authored-By: Fabian Neumann --- pypsa/linopt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 2fa5d44bc..120e04b53 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -190,7 +190,7 @@ def _str_array(array): def join_exprs(df): """ - Helper function to join arrays, series or frames of stings together. + Helper function to join arrays, series or frames of strings together. """ return ''.join(np.asarray(df).flatten()) From c317f04ec04b1a6598c8daeac79b5175cf06689f Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:14:51 +0100 Subject: [PATCH 037/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 6121f202d..1ebd378be 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -589,7 +589,7 @@ An additional constraint can easily be implemented by using the funtions * ``pypsa.linopt.write_constraint`` for writing out the constraint to the ``.lp`` file * ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this only necessary if a shadow price should be extracted after solving -The are funcitons defined as such: +The are functions defined as such: .. automethod:: pypsa.linopt.get_var .. 
automethod:: pypsa.linopt.linexpr From 40533b809677ef79afb3866f51db5d67712cb921 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:15:22 +0100 Subject: [PATCH 038/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 1ebd378be..c59efa78d 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -596,7 +596,7 @@ The are functions defined as such: .. automethod:: pypsa.linopt.write_constraint .. automethod:: pypsa.linopt.set_conref -The function ``extra_postprocessing`` is not necessary when pyomo deactivated. For retrieving additional shadow prices, just pass the component name, to which the constraint is attached, to ``keep_shadowprices``. +The function ``extra_postprocessing`` is not necessary when pyomo is deactivated. For retrieving additional shadow prices, just pass the component name, to which the constraint is attached, to the ``keep_shadowprices`` parameter of the ``lopf`` function. Fixing variables ---------------- From 7aaa1f0c5c58cc088f611b8440b088ad8423631d Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:15:50 +0100 Subject: [PATCH 039/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index c59efa78d..8c5078ef0 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -601,7 +601,7 @@ The function ``extra_postprocessing`` is not necessary when pyomo is deactivated Fixing variables ---------------- -This feature is only valid if pyomo is disabled during the lopf (i.e. ``pyomo=False``). It is possible to fix all variables to specific values. Create a dataframe or a column with the same name as the variable but with suffix '_set'. For all not NaN values additional constraints will be build to fix the variables. +This feature is only valid if pyomo is disabled in the lopf function (i.e. ``pyomo=False``). It is possible to fix all variables to specific values. Create a pandas DataFrame or a column with the same name as the variable but with suffix '_set'. For all not ``NaN`` values additional constraints will be built to fix the variables. For example let's say, we want to fix the output of a single generator 'gas1' to 200 MW for all snapshots. Then we can add a dataframe ``p_set`` to network.generators_t with the according value and index. From be562a291217961f16ab28ba663780345caeb4aa Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:16:40 +0100 Subject: [PATCH 040/111] Update doc/optimal_power_flow.rst Co-Authored-By: Fabian Neumann --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 8c5078ef0..b2db0aedc 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -619,7 +619,7 @@ For the linear optimal power flow, the following data for each component are used. For almost all values, defaults are assumed if not explicitly set. For the defaults and units, see :doc:`components`.
-* network{snapshot_weightings} +* network.{snapshot_weightings} * bus.{v_nom, carrier} From ef882aea260fe9bdbe8f2e2b41c26b2b1fa879b1 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:21:39 +0100 Subject: [PATCH 041/111] Update pypsa/descriptors.py Co-Authored-By: Fabian Neumann --- pypsa/descriptors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py index 6c3fb1b2b..93e000c7f 100644 --- a/pypsa/descriptors.py +++ b/pypsa/descriptors.py @@ -310,7 +310,7 @@ def zsum(s, *args, **kwargs): def expand_series(ser, columns): """ - Helper function to fastly expand a series to a dataframe with according + Helper function to quickly expand a series to a dataframe with according column axis and every single column being the equal to the given series. """ return ser.to_frame(columns[0]).reindex(columns=columns).ffill(axis=1) From 0bc428659775274d237a4f1336426b6837235223 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:23:07 +0100 Subject: [PATCH 042/111] Update pypsa/linopt.py Co-Authored-By: Fabian Neumann --- pypsa/linopt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 120e04b53..1a0649277 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -127,7 +127,7 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): ---------- tulples: tuple of tuples Each tuple must of the form (coeff, var), where - * coeff is a numerical value, or a numeical array, series, frame + * coeff is a numerical value, or a numerical array, series, frame * var is a str or a array, series, frame of variable strings as_pandas : bool, default False Whether to return to resulting array as a series, if 1-dimensional, or From 831a2a1e22645cf5fb4aaf18c5d37908ef04604b Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Mon, 28 Oct 2019 17:23:31 +0100 Subject: [PATCH 043/111] Update pypsa/stats.py Co-Authored-By: Fabian Neumann --- pypsa/stats.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/stats.py b/pypsa/stats.py index 7869a22d8..388e20771 100644 --- a/pypsa/stats.py +++ b/pypsa/stats.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Post-solving statistics of network. This module contains functions to anaylise +Post-solving statistics of network. This module contains functions to anaylize an optimized network. Basic information of network can be summarized as well as constraint gaps can be double-checked. """ From 01706acae221487dad3163c6eef02d6610e829ed Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 28 Oct 2019 17:50:42 +0100 Subject: [PATCH 044/111] doc: fix typo --- doc/optimal_power_flow.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 454182d39..8182c7a2c 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -187,7 +187,7 @@ availability is a constant. If the generator's nominal power :math:`\bar{g}_{n,s}` is also the -subject of optimisation (``generator.p_nom_extendable -- True``) then +subject of optimisation (``generator.p_nom_extendable == True``) then limits ``generator.p_nom_min`` and ``generator.p_nom_max`` on the installable nominal power may also be introduced, e.g. 
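Before the series moves on to bug fixes, a worked sketch of the pyomo-free ``extra_functionality`` hook documented in the doc patches above, assuming the ``pypsa.linopt`` API as it stands at this point of the series. The 90%-of-load cap is a toy constraint and the reference name 'mu_dispatch_cap' is made up; this is an illustration, not a PyPSA default:

    from pypsa.linopt import get_var, linexpr, write_constraint, set_conref

    def extra_functionality(n, snapshots):
        # toy constraint: in every snapshot, summed generator dispatch
        # may not exceed 90% of the summed load (purely illustrative)
        gen_p = get_var(n, 'Generator', 'p').loc[snapshots]  # frame of variable names
        lhs = linexpr((1, gen_p), as_pandas=True).sum(axis=1)  # concatenates the terms
        rhs = 0.9 * n.loads_t.p_set.loc[snapshots].sum(axis=1)  # assumes time-varying loads
        con = write_constraint(n, lhs, '<=', rhs)
        # attaching a reference is only needed to read the shadow price later
        set_conref(n, con, 'Generator', 'mu_dispatch_cap')

    n.lopf(pyomo=False, extra_functionality=extra_functionality)

Passing ``keep_shadowprices=['Generator']`` to ``lopf`` would then retain the dual of this constraint after solving, in line with the ``keep_shadowprices`` behaviour described above.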
From a8d8ba1906ce7911b76f14ba256a802be3ee19dc Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 4 Nov 2019 18:02:44 +0100 Subject: [PATCH 045/111] linopf: fix string replacement --- pypsa/linopf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 0e55f4400..355017113 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -730,7 +730,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc", gc.collect() solution_fn = f"/tmp/pypsa-solve-{n.identifier}.sol" if solver_logfile is None: - solver_logfile = "pypsa-solve-{n.identifier}.log" + solver_logfile = f"pypsa-solve-{n.identifier}.log" if warmstart == True: warmstart = n.basis_fn From 8cfce1f05aaad9716df9aebc855139dae762bf10 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 7 Nov 2019 16:52:16 +0100 Subject: [PATCH 046/111] small corrections --- pypsa/linopf.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 355017113..e2f55f961 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -138,7 +138,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr): set_conref(n, constraints, c, 'mu_lower', pnl=True, spec=attr) -def define_fixed_variariable_constraints(n, sns, c, attr, pnl=True): +def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): """ Sets constraints for fixing variables of a given component and attribute to the corresponding values in n.df(c)[attr + '_set'] if pnl is True, or @@ -250,7 +250,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1): set_conref(n, constraints, 'Bus', 'marginal_price') -def define_kirchhoff_constraints(n): +def define_kirchhoff_constraints(n, sns): """ Defines Kirchhoff voltage constraints @@ -263,7 +263,6 @@ def cycle_flow(ds): vals = linexpr((ds, branch_vars[ds.index])) + '\n' return vals.sum(1) - sns = get_var(n, 'Line', 's').index constraints = [] for sub in n.sub_networks.obj: branches = sub.branches() @@ -519,17 +518,20 @@ def time_info(message): for c, attr in lookup.query('nominal and not handle_separately').index: define_nominal_for_extendable_variables(n, c, attr) - define_fixed_variariable_constraints(n, snapshots, c, attr, pnl=False) + # define_fixed_variable_constraints(n, snapshots, c, attr, pnl=False) for c, attr in lookup.query('not nominal and not handle_separately').index: define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) define_dispatch_for_extendable_variables(n, snapshots, c, attr) define_dispatch_for_extendable_constraints(n, snapshots, c, attr) - define_fixed_variariable_constraints(n, snapshots, c, attr) + # define_fixed_variable_constraints(n, snapshots, c, attr) + + # consider only state_of_charge_set for the moment + define_fixed_variable_constraints(n, snapshots, 'StorageUnit', 'state_of_charge') define_ramp_limit_constraints(n, snapshots) define_storage_unit_constraints(n, snapshots) define_store_constraints(n, snapshots) - define_kirchhoff_constraints(n) + define_kirchhoff_constraints(n, snapshots) define_nodal_balance_constraints(n, snapshots) define_global_constraints(n, snapshots) define_objective(n, snapshots) From 2a8c339977039bd9af87039d08517b17b947974c Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 10:44:08 +0100 Subject: [PATCH 047/111] linopf.py: add warning for non support of unit commitment pf.py: change logger.info to logger.debug --- pypsa/linopf.py | 5 +++++ pypsa/pf.py | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 
e2f55f961..98b5496fd 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -720,6 +720,11 @@ def network_lopf(n, snapshots=None, solver_name="cbc", if formulation != "kirchhoff": raise NotImplementedError("Only the kirchhoff formulation is supported") + if n.generators.committable.any(): + logger.warn("Unit commitment is not yet implemented for optimsation " + "without using pyomo. The following generators will be treated as " + f"non-commitables:\n{list(n.generators.query('committable').index)}") + #disable logging because multiple slack bus calculations, keep output clean snapshots = _as_snapshots(n, snapshots) n.calculate_dependent_values() diff --git a/pypsa/pf.py b/pypsa/pf.py index 2f6a1281f..e0d2f734e 100644 --- a/pypsa/pf.py +++ b/pypsa/pf.py @@ -574,7 +574,7 @@ def find_slack_bus(sub_network): #also put it into the dataframe sub_network.network.sub_networks.at[sub_network.name,"slack_bus"] = sub_network.slack_bus -# logger.info("Slack bus for sub-network {} is {}".format(sub_network.name, sub_network.slack_bus)) + logger.debug("Slack bus for sub-network {} is {}".format(sub_network.name, sub_network.slack_bus)) def find_bus_controls(sub_network): @@ -833,7 +833,7 @@ def find_tree(sub_network, weight='x_pu'): #find bus with highest degree to use as slack tree_slack_bus, slack_degree = max(degree(sub_network.tree), key=itemgetter(1)) -# logger.info("Tree slack bus is %s with degree %d.", tree_slack_bus, slack_degree) + logger.debug("Tree slack bus is %s with degree %d.", tree_slack_bus, slack_degree) #determine which buses are supplied in tree through branch from slack From 7a989cb730a840db3fd9fd4ea87d72e7f233ea6f Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 10:55:54 +0100 Subject: [PATCH 048/111] linopf: fix typo --- pypsa/linopf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 98b5496fd..e89a3966e 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -721,9 +721,9 @@ def network_lopf(n, snapshots=None, solver_name="cbc", raise NotImplementedError("Only the kirchhoff formulation is supported") if n.generators.committable.any(): - logger.warn("Unit commitment is not yet implemented for optimsation " + logger.warn("Unit commitment is not yet implemented for optimisation " "without using pyomo. 
The following generators will be treated as " - f"non-commitables:\n{list(n.generators.query('committable').index)}") + f"non-committables:\n{list(n.generators.query('committable').index)}") #disable logging because multiple slack bus calculations, keep output clean snapshots = _as_snapshots(n, snapshots) From a4f3eb3dd69e61d9c069d64d4ab6d4483c3bc345 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 14:30:07 +0100 Subject: [PATCH 049/111] linopf.py: fill_value for buses without any components in KVL constraint --- pypsa/linopf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index e89a3966e..b21f5f87c 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -241,7 +241,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1): lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) .groupby(axis=1, level=0) .agg(lambda x: ''.join(x.values)) - .reindex(columns=n.buses.index)) + .reindex(columns=n.buses.index, fill_value='')) sense = '=' rhs = ((- n.loads_t.p_set.loc[sns] * n.loads.sign) .groupby(n.loads.bus, axis=1).sum() From 1019b4af6f2af612411187bb64b8951add2912c5 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Nov 2019 15:00:09 +0100 Subject: [PATCH 050/111] linopf: Correct global constraints for stores --- pypsa/descriptors.py | 1 - pypsa/linopf.py | 6 ++---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py index 93e000c7f..4e7dcea8b 100644 --- a/pypsa/descriptors.py +++ b/pypsa/descriptors.py @@ -368,4 +368,3 @@ def get_bounds_pu(n, c, sns, index=slice(None), attr=None): else: min_pu = get_switchable_as_dense(n, c, min_pu_str, sns) return min_pu[index], max_pu[index] - diff --git a/pypsa/linopf.py b/pypsa/linopf.py index e89a3966e..b9f098287 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -411,10 +411,10 @@ def define_global_constraints(n, sns): n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) stores = n.stores.query('carrier in @emissions.index and not e_cyclic') if not stores.empty: - vals = linexpr((-stores.carrier.map(n.emissions), + vals = linexpr((-stores.carrier.map(emissions), get_var(n, 'Store', 'e').loc[sns[-1], stores.index])) lhs = lhs + '\n' + join_exprs(vals) - rhs -= stores.carrier.map(emissions) @ stores.state_of_charge_initial + rhs -= stores.carrier.map(emissions) @ stores.e_initial con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) @@ -842,5 +842,3 @@ def msq_diff(n, s_nom_prev): network_lopf(n, snapshots, **kwargs) n.lines.loc[ext_i, 's_nom_extendable'] = True n.links.loc[ext_links_i, 'p_nom_extendable'] = True - - From e82e5acf3c09c67f26676be6ed86132c54b398a0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 16:03:03 +0100 Subject: [PATCH 051/111] linopf add linkports --- pypsa/linopf.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index b21f5f87c..9632b66da 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -235,9 +235,15 @@ def bus_injection(c, attr, groupcol='bus', sign=1): ['StorageUnit', 'p_store', 'bus', -1], ['Line', 's', 'bus0', -1], ['Line', 's', 'bus1', 1], ['Transformer', 's', 'bus0', -1], ['Transformer', 's', 'bus1', 1], ['Link', 'p', 'bus0', -1], - ['Link', 'p', 'bus1', n.links.efficiency]] + ['Link', 'p', 'bus1', get_as_dense(n, 'Link', 'efficiency', sns)]] args = [arg for arg in args if not n.df(arg[0]).empty] + add_linkports = [i[3:] for i in n.links.columns if i.startswith('bus') + and i not in 
['bus0', 'bus1']] + for i in add_linkports: + eff = get_as_dense(n, 'Link', f'efficiency{i}', sns) + args.append(['Link', 'p', f'bus{i}', eff]) + lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) .groupby(axis=1, level=0) .agg(lambda x: ''.join(x.values)) @@ -256,6 +262,7 @@ def define_kirchhoff_constraints(n, sns): """ comps = n.passive_branch_components & set(n.variables.index.levels[0]) + if len(comps) == 0: return branch_vars = pd.concat({c:get_var(n, c, 's') for c in comps}, axis=1) def cycle_flow(ds): @@ -387,15 +394,18 @@ def define_global_constraints(n, sns): """ glcs = n.global_constraints.query('type == "primary_energy"') for name, glc in glcs.iterrows(): + rhs = 0 + lhs = '' carattr = glc.carrier_attribute emissions = n.carriers.query(f'{carattr} != 0')[carattr] if emissions.empty: continue gens = n.generators.query('carrier in @emissions.index') - em_pu = gens.carrier.map(emissions)/gens.efficiency - em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T - vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) - lhs = join_exprs(vals) - rhs = glc.constant + if not gens.empty: + em_pu = gens.carrier.map(emissions)/gens.efficiency + em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T + vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) + lhs += join_exprs(vals) + rhs += glc.constant #storage units sus = n.storage_units.query('carrier in @emissions.index and ' From f058738248bba9da110104e1c1751181aceaa1a5 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 19:32:40 +0100 Subject: [PATCH 052/111] - fix functionality for muliline links - small fix in glpk objective parsing - set loads_t.p correctly --- pypsa/descriptors.py | 7 +++++++ pypsa/linopf.py | 50 ++++++++++++++++++++++++++++---------------- pypsa/linopt.py | 2 +- 3 files changed, 40 insertions(+), 19 deletions(-) diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py index 4e7dcea8b..b8c5effc1 100644 --- a/pypsa/descriptors.py +++ b/pypsa/descriptors.py @@ -368,3 +368,10 @@ def get_bounds_pu(n, c, sns, index=slice(None), attr=None): else: min_pu = get_switchable_as_dense(n, c, min_pu_str, sns) return min_pu[index], max_pu[index] + +def additional_linkports(n): + return [i[3:] for i in n.links.columns if i.startswith('bus') + and i not in ['bus0', 'bus1']] + + + diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 1253a8f72..fa08c46f7 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -21,7 +21,7 @@ from .pf import (_as_snapshots, get_switchable_as_dense as get_as_dense) from .descriptors import (get_bounds_pu, get_extendable_i, get_non_extendable_i, - expand_series, nominal_attrs) + expand_series, nominal_attrs, additional_linkports) from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, reset_counter, join_exprs, @@ -227,8 +227,12 @@ def bus_injection(c, attr, groupcol='bus', sign=1): #additional sign only necessary for branches in reverse direction if 'sign' in n.df(c): sign = sign * n.df(c).sign - return linexpr((sign, get_var(n, c, attr)), as_pandas=True)\ + expr = linexpr((sign, get_var(n, c, attr)), as_pandas=True)\ .rename(columns=n.df(c)[groupcol]) + # drop empty bus2, bus3 if multiline link + if c == 'Link': + expr.drop(columns='', errors='ignore', inplace=True) + return expr # one might reduce this a bit by using n.branches and lookup args = [['Generator', 'p'], ['Store', 'p'], ['StorageUnit', 'p_dispatch'], @@ -238,18 +242,16 @@ def bus_injection(c, attr, groupcol='bus', 
sign=1): ['Link', 'p', 'bus1', get_as_dense(n, 'Link', 'efficiency', sns)]] args = [arg for arg in args if not n.df(arg[0]).empty] - add_linkports = [i[3:] for i in n.links.columns if i.startswith('bus') - and i not in ['bus0', 'bus1']] - for i in add_linkports: + for i in additional_linkports(n): eff = get_as_dense(n, 'Link', f'efficiency{i}', sns) args.append(['Link', 'p', f'bus{i}', eff]) - lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) + lhs = (pd.concat([bus_injection(*arg) for arg in args], axis=1) .groupby(axis=1, level=0) .agg(lambda x: ''.join(x.values)) .reindex(columns=n.buses.index, fill_value='')) sense = '=' - rhs = ((- n.loads_t.p_set.loc[sns] * n.loads.sign) + rhs = ((- get_as_dense(n, 'Load', 'p_set', sns) * n.loads.sign) .groupby(n.loads.bus, axis=1).sum() .reindex(columns=n.buses.index, fill_value=0)) constraints = write_constraint(n, lhs, sense, rhs) @@ -394,18 +396,20 @@ def define_global_constraints(n, sns): """ glcs = n.global_constraints.query('type == "primary_energy"') for name, glc in glcs.iterrows(): - rhs = 0 + rhs = glc.constant lhs = '' carattr = glc.carrier_attribute emissions = n.carriers.query(f'{carattr} != 0')[carattr] + if emissions.empty: continue + + #generators gens = n.generators.query('carrier in @emissions.index') if not gens.empty: em_pu = gens.carrier.map(emissions)/gens.efficiency em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) lhs += join_exprs(vals) - rhs += glc.constant #storage units sus = n.storage_units.query('carrier in @emissions.index and ' @@ -515,7 +519,7 @@ def time_info(message): objective_fn = f"/tmp/objective-{n.identifier}.txt" constraints_fn = f"/tmp/constraints-{n.identifier}.txt" bounds_fn = f"/tmp/bounds-{n.identifier}.txt" - n.problem_fn = f"/tmp/test-{n.identifier}.lp" + n.problem_fn = f"/tmp/pypsa-problem-{n.identifier}.lp" n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') @@ -537,6 +541,7 @@ def time_info(message): # consider only state_of_charge_set for the moment define_fixed_variable_constraints(n, snapshots, 'StorageUnit', 'state_of_charge') + define_fixed_variable_constraints(n, snapshots, 'Store', 'e') define_ramp_limit_constraints(n, snapshots) define_storage_unit_constraints(n, snapshots) @@ -594,7 +599,11 @@ def map_solution(c, attr): set_from_frame(c, 'p1', - values) elif c == 'Link': set_from_frame(c, 'p0', values) - set_from_frame(c, 'p1', - values * n.df(c).efficiency) + for i in ['1'] + additional_linkports(n): + i_eff = '' if i == '1' else i + eff = get_as_dense(n, 'Link', f'efficiency{i_eff}', sns) + set_from_frame(c, f'p{i}', - values * eff) + else: set_from_frame(c, attr, values) else: @@ -633,12 +642,17 @@ def map_dual(c, attr, pnl): get_con(n, c, attr, pop=True) #load - n.loads_t.p = n.loads_t.p_set + if len(n.loads): + load_p_set = get_as_dense(n, 'Load', 'p_set', sns) + n.loads_t["p"].loc[sns] = load_p_set # recalculate injection ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')] + for i in additional_linkports(n): + ca.append(('Link', f'p{i}', f'bus{i}')) + sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1 #sign for 'Link' n.buses_t.p = pd.concat( [n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group]) @@ -647,14 +661,14 @@ def map_dual(c, attr, pnl): def v_ang_for_(sub): buses_i = sub.buses_o - if len(buses_i) == 1: return + if 
len(buses_i) == 1: + return pd.DataFrame(0, index=sns, columns=buses_i) sub.calculate_B_H(skip_pre=True) - if len(sub.buses_i()) == 1: return Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i) Z -= Z[sub.slack_bus] return n.buses_t.p.reindex(columns=buses_i) @ Z - n.buses_t.v_ang = (pd.concat( - [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1) + n.buses_t.v_ang = (pd.concat([v_ang_for_(sub) for sub in n.sub_networks.obj], + axis=1) .reindex(columns=n.buses.index, fill_value=0)) @@ -765,8 +779,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", return status,termination_condition #adjust objective value - for c, attr in nominal_attrs.items(): - obj -= n.df(c)[attr] @ n.df(c).capital_cost +# for c, attr in nominal_attrs.items(): +# obj -= n.df(c)[attr] @ n.df(c).capital_cost n.objective = obj gc.collect() assign_solution(n, snapshots, variables_sol, constraints_dual, diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 1a0649277..d030e0fa4 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -408,7 +408,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, info += line info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] status = info.Status.lower().strip() - objective = float(re.sub('[^0-9]+', '', info.Objective)) + objective = float(re.sub('[^0-9\.]+', '', info.Objective)) termination_condition = status if termination_condition != "optimal": From d99f931d42a4e413dbcefae41292c44b71f415ef Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 8 Nov 2019 21:03:16 +0100 Subject: [PATCH 053/111] linopf.py: set loads_t.p from frame --- pypsa/linopf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index fa08c46f7..c8c354cae 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -643,8 +643,7 @@ def map_dual(c, attr, pnl): #load if len(n.loads): - load_p_set = get_as_dense(n, 'Load', 'p_set', sns) - n.loads_t["p"].loc[sns] = load_p_set + set_from_frame('Load', 'p', get_as_dense(n, 'Load', 'p_set', sns)) # recalculate injection ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), From 4be6befa7e977d4f607b1691032cbebff8bb133d Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 11 Nov 2019 18:13:19 +0100 Subject: [PATCH 054/111] linopf use xcounter and ccounter locally linopf use tempfile.mkstemp for output files --- pypsa/linopf.py | 50 ++++++++++++++++++++++--------------------------- pypsa/linopt.py | 34 ++++++++++++++------------------- 2 files changed, 36 insertions(+), 48 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index c8c354cae..3e68721c6 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -24,15 +24,16 @@ expand_series, nominal_attrs, additional_linkports) from .linopt import (linexpr, write_bound, write_constraint, set_conref, - set_varref, get_con, get_var, reset_counter, join_exprs, - run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, + set_varref, get_con, get_var, join_exprs, run_and_read_cbc, + run_and_read_gurobi, run_and_read_glpk, clear_references) import pandas as pd import numpy as np -import gc, string, random, time, os, re +import gc, string, random, time, os, re, shutil +from tempfile import mkstemp import logging logger = logging.getLogger(__name__) @@ -503,7 +504,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, n.problem_fn """ - reset_counter() + n._xCounter, n._cCounter = 0, 0 cols = ['component', 'name', 'pnl', 'specification'] n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) @@ -511,15 +512,11 @@ def 
prepare_lopf(n, snapshots=None, keep_files=False, snapshots = n.snapshots if snapshots is None else snapshots start = time.time() - def time_info(message): - logger.info(f'{message} {round(time.time()-start, 2)}s') - n.identifier = ''.join(random.choice(string.ascii_lowercase) - for i in range(8)) - objective_fn = f"/tmp/objective-{n.identifier}.txt" - constraints_fn = f"/tmp/constraints-{n.identifier}.txt" - bounds_fn = f"/tmp/bounds-{n.identifier}.txt" - n.problem_fn = f"/tmp/pypsa-problem-{n.identifier}.lp" + objective_fn = mkstemp(prefix='pypsa-objectve-', suffix='.txt', text=True)[1] + constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True)[1] + bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True)[1] + n.problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True)[1] n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') @@ -554,23 +551,20 @@ def time_info(message): if extra_functionality is not None: extra_functionality(n, snapshots) - n.objective_f.close() - n.constraints_f.close() n.bounds_f.write("end\n") - n.bounds_f.close() - del n.objective_f - del n.constraints_f - del n.bounds_f + n.bounds_f.close(); del n.bounds_f + n.objective_f.close(); del n.objective_f + n.constraints_f.close(); del n.constraints_f - os.system(f"cat {objective_fn} {constraints_fn} {bounds_fn} " - f"> {n.problem_fn}") + with open(n.problem_fn, 'wb') as wfd: + for f in [objective_fn, constraints_fn, bounds_fn]: + with open(f,'rb') as fd: + shutil.copyfileobj(fd, wfd) + if not keep_files: + os.remove(f) - time_info('Total preparation time:') - - if not keep_files: - for fn in [objective_fn, constraints_fn, bounds_fn]: - os.system("rm "+ fn) + logger.info(f'Total preparation time: {round(time.time()-start, 2)}s') def assign_solution(n, sns, variables_sol, constraints_dual, @@ -675,7 +669,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc", solver_logfile=None, extra_functionality=None, extra_postprocessing=None, formulation="kirchhoff", keep_references=False, keep_files=False, - keep_shadowprices=None, solver_options={}, + keep_shadowprices=None, solver_options=None, warmstart=False, store_basis=True): """ Linear optimal power flow for a group of snapshots. @@ -758,9 +752,9 @@ def network_lopf(n, snapshots=None, solver_name="cbc", logger.info("Prepare linear problem") prepare_lopf(n, snapshots, keep_files, extra_functionality) gc.collect() - solution_fn = f"/tmp/pypsa-solve-{n.identifier}.sol" + solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol')[1] if solver_logfile is None: - solver_logfile = f"pypsa-solve-{n.identifier}.log" + solver_logfile = mkstemp(prefix='pypsa-solve', suffix='.log')[1] if warmstart == True: warmstart = n.basis_fn diff --git a/pypsa/linopt.py b/pypsa/linopt.py index d030e0fa4..a543657c0 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -24,13 +24,6 @@ # writing functions # ============================================================================= -xCounter = 0 -cCounter = 0 -def reset_counter(): - global xCounter, cCounter - xCounter, cCounter = 0, 0 - - def write_bound(n, lower, upper, axes=None): """ Writer function for writing out mutliple variables at a time. 
If lower and @@ -46,13 +39,13 @@ def write_bound(n, lower, upper, axes=None): shape = tuple(map(len, axes)) ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series length = np.prod(shape) - global xCounter - xCounter += length - variables = np.array([f'x{x}' for x in range(xCounter - length, xCounter)], + n._xCounter += length + variables = np.array([f'x{x}' for x in range(n._xCounter - length, n._xCounter)], dtype=object).reshape(shape) lower, upper = _str_array(lower), _str_array(upper) - for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten(): - n.bounds_f.write(s) +# for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten(): +# n.bounds_f.write(s) + n.bounds_f.write(join_exprs(lower + ' <= '+ variables + ' <= '+ upper + '\n')) return ser_or_frame(variables, *axes) def write_constraint(n, lhs, sense, rhs, axes=None): @@ -70,15 +63,15 @@ def write_constraint(n, lhs, sense, rhs, axes=None): shape = tuple(map(len, axes)) ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series length = np.prod(shape) - global cCounter - cCounter += length - cons = np.array([f'c{x}' for x in range(cCounter - length, cCounter)], + n._cCounter += length + cons = np.array([f'c{x}' for x in range(n._cCounter - length, n._cCounter)], dtype=object).reshape(shape) if isinstance(sense, str): sense = '=' if sense == '==' else sense lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) - for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten(): - n.constraints_f.write(c) +# for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten(): +# n.constraints_f.write(c) + n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n')) return ser_or_frame(cons, *axes) @@ -442,15 +435,16 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, https://www.gurobi.com/documentation/{gurobi_verion}/refman/parameter_descriptions.html """ import gurobipy - if (solver_logfile is not None) and (solver_options is not None): - solver_options["logfile"] = solver_logfile - # disable logging for this part, as gurobi output is doubled otherwise logging.disable(50) + m = gurobipy.read(problem_fn) if solver_options is not None: for key, value in solver_options.items(): m.setParam(key, value) + if solver_logfile is not None: + m.setParam("logfile", solver_logfile) + if warmstart: m.read(warmstart) m.optimize() From aabca4f2a32219904bc392afb4934651d36e0e58 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 11 Nov 2019 22:25:17 +0100 Subject: [PATCH 055/111] travis.yml add windows --- .travis.yml | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index ace811d36..f6a04e652 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,21 +4,19 @@ language: python sudo: false # Use container-based infrastructure +os: + - windows + - linux + matrix: include: - # - env: - # - PYTHON_VERSION="2.7" - env: - PYTHON_VERSION="3.6" - env: - PYTHON_VERSION="3.7" before_install: - - if [[ "$PYTHON_VERSION" == "2.7" ]]; then - wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; - else - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - fi + - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - bash miniconda.sh -b -p $HOME/miniconda - export PATH="$HOME/miniconda/bin:$PATH" - hash -r From 5ee71d16ae6da41bf106bb51aa60ab556bf3dab6 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 11 Nov 2019 
22:59:06 +0100 Subject: [PATCH 056/111] travis.yml test bash language --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f6a04e652..e21855bb1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ # Modified from # https://github.com/calliope-project/calliope/blob/master/.travis.yml -language: python +language: bash sudo: false # Use container-based infrastructure os: From 1b0b796559f4725a69f9277e31ac5ef8d3237072 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 11 Nov 2019 23:07:11 +0100 Subject: [PATCH 057/111] travis.yml try again integrating windows --- .travis.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index e21855bb1..367d333ca 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,11 +16,9 @@ matrix: - PYTHON_VERSION="3.7" before_install: - - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - - bash miniconda.sh -b -p $HOME/miniconda - - export PATH="$HOME/miniconda/bin:$PATH" - - hash -r - - conda config --set always_yes yes --set changeps1 no + # install conda + - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh + - source conda4travis.sh # - conda update -q conda # Useful for debugging any issues with conda - conda info -a From cf5cb391f0d5290e3c11f78ae26010e4bcde4ce9 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 10:14:00 +0100 Subject: [PATCH 058/111] travis.yml remove second source conda.sh --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 367d333ca..100c31eb5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,8 +21,8 @@ before_install: - source conda4travis.sh # - conda update -q conda # Useful for debugging any issues with conda - - conda info -a - - source $HOME/miniconda/etc/profile.d/conda.sh +# - conda info -a +# - source $HOME/miniconda/etc/profile.d/conda.sh install: - conda config --add pinned_packages python=$PYTHON_VERSION From 17563754bc4e769f157ce2525ce75196fc0085db Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 12:15:38 +0100 Subject: [PATCH 059/111] travis: exclude cbc from tests (only reproducing glpk solutions now) --- environment.yaml | 2 +- test/test_ac_dc_lopf.py | 2 +- test/test_opf_storage.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/environment.yaml b/environment.yaml index d71d2fa8b..0aa44a7d9 100644 --- a/environment.yaml +++ b/environment.yaml @@ -13,6 +13,6 @@ dependencies: - networkx>=1.10 - pyomo - cartopy>=0.16 - - coincbc +# - coincbc - glpk - gurobi::gurobi \ No newline at end of file diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index d38100eb6..4e0d2c9ac 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -26,7 +26,7 @@ def test_lopf(): #test results were generated with GLPK; solution should be unique, #so other solvers should not differ (tested with cbc and gurobi) - solver_name = "cbc" + solver_name = "glpk" snapshots = n.snapshots diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 9a518ef8e..5cbd7ffb1 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -25,7 +25,7 @@ def test_opf(pyomo=True): target_gen_p = pd.read_csv(target_path, index_col=0) #test results were generated with GLPK and other solvers may differ - for solver_name in ["cbc", "glpk"]: + for solver_name in ["glpk"]: n.lopf(solver_name=solver_name, pyomo=True) From 
b299ba978cd5bd9b96465c77f61c24db43aaf5be Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 12:24:15 +0100 Subject: [PATCH 060/111] test/* split concatenated paths in os.join --- test/test_ac_dc_lopf.py | 2 +- test/test_ac_dc_lpf.py | 2 +- test/test_opf_storage.py | 6 +++--- test/test_sclopf_scigrid.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 4e0d2c9ac..09deb15d6 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -13,7 +13,7 @@ def test_lopf(): - csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/ac-dc-meshed/ac-dc-data") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "ac-dc-meshed", "ac-dc-data") n = pypsa.Network(csv_folder_name) n.links_t.p_set.drop(columns=n.links.index, inplace=True) diff --git a/test/test_ac_dc_lpf.py b/test/test_ac_dc_lpf.py index d3774876f..7abd2330e 100644 --- a/test/test_ac_dc_lpf.py +++ b/test/test_ac_dc_lpf.py @@ -21,7 +21,7 @@ def test_lpf(): - csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/ac-dc-meshed/ac-dc-data") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "ac-dc-meshed", "ac-dc-data") network = pypsa.Network(csv_folder_name) diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 5cbd7ffb1..3307d0dd1 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -15,8 +15,8 @@ def test_opf(pyomo=True): - csv_folder_name = os.path.join(os.path.dirname(__file__), - "../examples/opf-storage-hvdc/opf-storage-data") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", + "opf-storage-hvdc","opf-storage-data") n = pypsa.Network(csv_folder_name) @@ -33,7 +33,7 @@ def test_opf(pyomo=True): if sys.version_info.major >= 3: - for solver_name in ["cbc", "glpk"]: + for solver_name in ["glpk"]: n.lopf(solver_name=solver_name, pyomo=False) diff --git a/test/test_sclopf_scigrid.py b/test/test_sclopf_scigrid.py index ae700ed1b..49b04ecf2 100644 --- a/test/test_sclopf_scigrid.py +++ b/test/test_sclopf_scigrid.py @@ -6,7 +6,7 @@ import pypsa def test_sclopf(): - csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/scigrid-de/scigrid-with-load-gen-trafos/") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "scigrid-de", "scigrid-with-load-gen-trafos") network = pypsa.Network(csv_folder_name) From 15cfeb142725d1d1c37645553bcd8363a30986cf Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 12:54:25 +0100 Subject: [PATCH 061/111] travis.yml "make test" -> "py.test" for windows compatibility --- .travis.yml | 3 ++- test/test_ac_dc_lopf.py | 6 +++--- test/test_sclopf_scigrid.py | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 100c31eb5..38fbb1828 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,7 +38,8 @@ install: # - "sh -e /etc/init.d/xvfb start" # - sleep 3 # give xvfb some time to start -script: "make test" +script: - py.test # after_success: # - coveralls diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 09deb15d6..4874ce7b7 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -46,11 +46,11 @@ def test_lopf(): n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) equal(n.generators_t.p.loc[:,n.generators.index], n_r.generators_t.p.loc[:,n.generators.index],decimal=2)
equal(n.lines_t.p0.loc[:,n.lines.index], - n_r.lines_t.p0.loc[:,n.lines.index],decimal=4) + n_r.lines_t.p0.loc[:,n.lines.index],decimal=2) equal(n.links_t.p0.loc[:,n.links.index], - n_r.links_t.p0.loc[:,n.links.index],decimal=4) + n_r.links_t.p0.loc[:,n.links.index],decimal=2) diff --git a/test/test_sclopf_scigrid.py b/test/test_sclopf_scigrid.py index 49b04ecf2..0ef6af1af 100644 --- a/test/test_sclopf_scigrid.py +++ b/test/test_sclopf_scigrid.py @@ -11,7 +11,7 @@ def test_sclopf(): network = pypsa.Network(csv_folder_name) #test results were generated with GLPK and other solvers may differ - solver_name = "cbc" + solver_name = "glpk" #There are some infeasibilities without line extensions for line_name in ["316","527","602"]: From 1f84cb0a08c230e05875eb2d6f69f654f750202d Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 14:53:28 +0100 Subject: [PATCH 062/111] linopf.py handle permission error for windows --- pypsa/linopf.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 3e68721c6..30e6997f1 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -561,8 +561,9 @@ def prepare_lopf(n, snapshots=None, keep_files=False, for f in [objective_fn, constraints_fn, bounds_fn]: with open(f,'rb') as fd: shutil.copyfileobj(fd, wfd) - if not keep_files: - os.remove(f) + if not keep_files: + for f in [objective_fn, constraints_fn, bounds_fn]: + os.remove(f) logger.info(f'Total preparation time: {round(time.time()-start, 2)}s') From 62f5264fe84faed900e8f8129476eab687580f5e Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 15:15:55 +0100 Subject: [PATCH 063/111] linopf: try again closing file explicitly --- pypsa/linopf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 30e6997f1..9fbbfb77b 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -561,6 +561,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, for f in [objective_fn, constraints_fn, bounds_fn]: with open(f,'rb') as fd: shutil.copyfileobj(fd, wfd) + fd.close() if not keep_files: for f in [objective_fn, constraints_fn, bounds_fn]: os.remove(f) From 6acb6e85e535195526f3f35dfc8fe0e7892697b0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 16:15:18 +0100 Subject: [PATCH 064/111] linopf: not open files in binary mode for concatenation --- pypsa/linopf.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 9fbbfb77b..f3fc7dd11 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -557,14 +557,12 @@ def prepare_lopf(n, snapshots=None, keep_files=False, n.objective_f.close(); del n.objective_f n.constraints_f.close(); del n.constraints_f - with open(n.problem_fn, 'wb') as wfd: + with open(n.problem_fn, 'w') as wfd: for f in [objective_fn, constraints_fn, bounds_fn]: - with open(f,'rb') as fd: + with open(f,'r') as fd: shutil.copyfileobj(fd, wfd) - fd.close() - if not keep_files: - for f in [objective_fn, constraints_fn, bounds_fn]: - os.remove(f) + if not keep_files: + os.remove(f) logger.info(f'Total preparation time: {round(time.time()-start, 2)}s') From 4a4badb990aaddfaacf1fff1479be20db6a4692d Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 16:58:58 +0100 Subject: [PATCH 065/111] test: try with keep_files=True --- pypsa/linopf.py | 26 +++++++++++++------------- pypsa/linopt.py | 11 ----------- test/test_ac_dc_lopf.py | 2 +- test/test_opf_storage.py | 2 +- 4 files changed, 15 insertions(+), 26 deletions(-) diff --git 
a/pypsa/linopf.py b/pypsa/linopf.py index f3fc7dd11..ec4065ed7 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -501,7 +501,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, extra_functionality=None): """ Sets up the linear problem and writes it out to a lp file, stored at - n.problem_fn + problem_fn """ n._xCounter, n._cCounter = 0, 0 @@ -516,7 +516,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, objective_fn = mkstemp(prefix='pypsa-objectve-', suffix='.txt', text=True)[1] constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True)[1] bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True)[1] - n.problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True)[1] + problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True)[1] n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') @@ -557,14 +557,15 @@ def prepare_lopf(n, snapshots=None, keep_files=False, n.objective_f.close(); del n.objective_f n.constraints_f.close(); del n.constraints_f - with open(n.problem_fn, 'w') as wfd: + with open(problem_fn, 'wb') as wfd: for f in [objective_fn, constraints_fn, bounds_fn]: - with open(f,'r') as fd: + with open(f,'rb') as fd: shutil.copyfileobj(fd, wfd) if not keep_files: os.remove(f) logger.info(f'Total preparation time: {round(time.time()-start, 2)}s') + return problem_fn def assign_solution(n, sns, variables_sol, constraints_dual, @@ -748,13 +749,11 @@ def network_lopf(n, snapshots=None, solver_name="cbc", n.determine_network_topology() clear_references(n) - logger.info("Prepare linear problem") - prepare_lopf(n, snapshots, keep_files, extra_functionality) - gc.collect() + problem_fn = prepare_lopf(n, snapshots, keep_files, extra_functionality) solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol')[1] if solver_logfile is None: - solver_logfile = mkstemp(prefix='pypsa-solve', suffix='.log')[1] + fdl, solver_logfile = mkstemp(prefix='pypsa-solve', suffix='.log') if warmstart == True: warmstart = n.basis_fn @@ -763,19 +762,20 @@ def network_lopf(n, snapshots=None, solver_name="cbc", logger.info("Solve linear problem") solve = eval(f'run_and_read_{solver_name}') - res = solve(n, n.problem_fn, solution_fn, solver_logfile, + res = solve(n, problem_fn, solution_fn, solver_logfile, solver_options, keep_files, warmstart, store_basis) status, termination_condition, variables_sol, constraints_dual, obj = res - del n.problem_fn if termination_condition != "optimal": return status,termination_condition + if not keep_files: + os.remove(problem_fn); os.remove(solution_fn) + #adjust objective value -# for c, attr in nominal_attrs.items(): -# obj -= n.df(c)[attr] @ n.df(c).capital_cost + for c, attr in nominal_attrs.items(): + obj -= n.df(c)[attr] @ n.df(c).capital_cost n.objective = obj - gc.collect() assign_solution(n, snapshots, variables_sol, constraints_dual, keep_references=keep_references, keep_shadowprices=keep_shadowprices) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index a543657c0..2c52a7b00 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -359,10 +359,6 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, variables_sol = sol[variables_b][2] constraints_dual = sol[~variables_b][3] - if not keep_files: - os.system("rm "+ problem_fn) - os.system("rm "+ solution_fn) - return (status, termination_condition, variables_sol, constraints_dual, objective) @@ -415,10 +411,6 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, constraints_dual = 
(pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') .fillna(0)) - if not keep_files: - os.system("rm "+ problem_fn) - os.system("rm "+ solution_fn) - return (status, termination_condition, variables_sol, constraints_dual, objective) @@ -458,9 +450,6 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, logger.info('No model basis stored') del n.basis_fn - if not keep_files: - os.system("rm "+ problem_fn) - Status = gurobipy.GRB.Status statusmap = {getattr(Status, s) : s.lower() for s in Status.__dir__() if not s.startswith('_')} diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 4874ce7b7..0a76a92a7 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -43,7 +43,7 @@ def test_lopf(): n_r.links_t.p0.loc[:,n.links.index],decimal=4) if sys.version_info.major >= 3: - n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) + n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False, keep_files=True) equal(n.generators_t.p.loc[:,n.generators.index], n_r.generators_t.p.loc[:,n.generators.index],decimal=2) diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 3307d0dd1..712798f7e 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -35,7 +35,7 @@ def test_opf(pyomo=True): for solver_name in ["glpk"]: - n.lopf(solver_name=solver_name, pyomo=False) + n.lopf(solver_name=solver_name, pyomo=False, keep_files=True) equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, decimal=2) From 1e6d7364a9d3bb7fc3be294148a3e093c7529598 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 17:28:24 +0100 Subject: [PATCH 066/111] travis: test cbc installation only for linux and osx --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 38fbb1828..732b67e18 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,6 +29,7 @@ install: - conda create -n pypsa python pip - conda env update -n pypsa --file=environment.yaml - conda env update -n pypsa --file=environment_dev.yaml + - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda update -n pypsa coincbc ; fi - conda activate pypsa # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . 
@@ -43,3 +44,4 @@ script: # after_success: # - coveralls + From 0990cfc3d376a656f4204800e88d9586895eda4e Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 17:56:57 +0100 Subject: [PATCH 067/111] travis: install cbc after environment activation --- .travis.yml | 11 ++--------- environment_dev.yaml | 1 + 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 732b67e18..cdcdcef34 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,21 +16,14 @@ matrix: - PYTHON_VERSION="3.7" before_install: - # install conda - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh - source conda4travis.sh - # - conda update -q conda - # Useful for debugging any issues with conda -# - conda info -a -# - source $HOME/miniconda/etc/profile.d/conda.sh install: - conda config --add pinned_packages python=$PYTHON_VERSION - - conda create -n pypsa python pip - - conda env update -n pypsa --file=environment.yaml - - conda env update -n pypsa --file=environment_dev.yaml - - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda update -n pypsa coincbc ; fi + - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml - conda activate pypsa + - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda install coincbc ; fi # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . diff --git a/environment_dev.yaml b/environment_dev.yaml index 2f5c0050b..32f1ab41b 100644 --- a/environment_dev.yaml +++ b/environment_dev.yaml @@ -4,6 +4,7 @@ channels: - conda-forge dependencies: + - python - pip - pytest - pytest-cov From fcec8bd8d36edd873692eb31dc597ccd0e763923 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 18:05:17 +0100 Subject: [PATCH 068/111] travis channel specification for coincbc --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cdcdcef34..1874ff52e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ install: - conda config --add pinned_packages python=$PYTHON_VERSION - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml - conda activate pypsa - - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda install coincbc ; fi + - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda install -c conda-forge coincbc ; fi # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . 
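The three commits above keep hitting the same constraint: conda-forge provides no coincbc build for Windows, so installing the cbc solver on CI has to be guarded by the operating system. A minimal sketch of the guard being iterated on here, assuming Travis exposes TRAVIS_OS_NAME with the values linux, osx and windows:

    # sketch: install cbc only where a conda-forge build exists
    if [ "$TRAVIS_OS_NAME" != "windows" ]; then
        conda install -c conda-forge coincbc
    fi

The commits that follow move this guard out of an explicit conda call and into the environment files themselves.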
From 6453126b260fde9526ecae55d906cf3de1f5aba6 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 18:33:05 +0100 Subject: [PATCH 069/111] travis: try with if else statement --- .travis.yml | 8 ++++++-- environment_non_win.yaml | 7 +++++++ 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 environment_non_win.yaml diff --git a/.travis.yml b/.travis.yml index 1874ff52e..289011cac 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,9 +21,13 @@ before_install: install: - conda config --add pinned_packages python=$PYTHON_VERSION - - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml + - if [ "$TRAVIS_OS_NAME" != "windows" ] + then + conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml --file=environment_non_win.yaml + else + conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml + fi - conda activate pypsa - - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda install -c conda-forge coincbc ; fi # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . diff --git a/environment_non_win.yaml b/environment_non_win.yaml new file mode 100644 index 000000000..79cd04365 --- /dev/null +++ b/environment_non_win.yaml @@ -0,0 +1,7 @@ +name: pypsa + +channels: + - conda-forge + +dependencies: + - coincbc \ No newline at end of file From 83f63f8842a89326f98fe3c3e4773f31b4161250 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 18:46:37 +0100 Subject: [PATCH 070/111] travis: rewrite if statement --- .travis.yml | 7 +------ environment.yaml | 4 ++-- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index 289011cac..9775287a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,12 +21,7 @@ before_install: install: - conda config --add pinned_packages python=$PYTHON_VERSION - - if [ "$TRAVIS_OS_NAME" != "windows" ] - then - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml --file=environment_non_win.yaml - else - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml - fi + - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml --file=environment_non_win.yaml; else conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml; fi; - conda activate pypsa # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . 
diff --git a/environment.yaml b/environment.yaml index 0aa44a7d9..c6459ec34 100644 --- a/environment.yaml +++ b/environment.yaml @@ -13,6 +13,6 @@ dependencies: - networkx>=1.10 - pyomo - cartopy>=0.16 -# - coincbc - glpk - - gurobi::gurobi \ No newline at end of file + - gurobi::gurobi +# - coincbc # will be added by travis \ No newline at end of file From b78e7e5392d0114a66a6d11938b9c1bd1405c016 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 12 Nov 2019 22:29:56 +0100 Subject: [PATCH 071/111] travis.yaml: try with adding cbc requirement in before_install --- .travis.yml | 10 +++++----- environment.yaml | 2 +- environment_non_win.yaml | 7 ------- 3 files changed, 6 insertions(+), 13 deletions(-) delete mode 100644 environment_non_win.yaml diff --git a/.travis.yml b/.travis.yml index 9775287a8..746bfa04c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,24 +4,24 @@ language: bash sudo: false # Use container-based infrastructure -os: - - windows - - linux matrix: include: + - os: + - windows + - linux - env: - PYTHON_VERSION="3.6" - - env: - PYTHON_VERSION="3.7" before_install: - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh - source conda4travis.sh + - if [ "$TRAVIS_OS_NAME" != "windows" ]; then echo " - coincbc" >> environment.yaml; fi; install: - conda config --add pinned_packages python=$PYTHON_VERSION - - if [ "$TRAVIS_OS_NAME" != "windows" ]; then conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml --file=environment_non_win.yaml; else conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml; fi; + - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml - conda activate pypsa # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . diff --git a/environment.yaml b/environment.yaml index c6459ec34..66c730044 100644 --- a/environment.yaml +++ b/environment.yaml @@ -13,6 +13,6 @@ dependencies: - networkx>=1.10 - pyomo - cartopy>=0.16 +# - coincbc - glpk - gurobi::gurobi -# - coincbc # will be added by travis \ No newline at end of file diff --git a/environment_non_win.yaml b/environment_non_win.yaml deleted file mode 100644 index 79cd04365..000000000 --- a/environment_non_win.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: pypsa - -channels: - - conda-forge - -dependencies: - - coincbc \ No newline at end of file From 16a814fdafddcce3cdcf6afcd241e7e40e6363ae Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 00:21:47 +0100 Subject: [PATCH 072/111] travis.yaml: split environment updates again --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 746bfa04c..38bb9b867 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,7 +21,8 @@ before_install: install: - conda config --add pinned_packages python=$PYTHON_VERSION - - conda env create -n pypsa --file=environment.yaml --file=environment_dev.yaml + - conda env create -n pypsa --file=environment.yaml + - conda env update -n pypsa --file=environment_dev.yaml - conda activate pypsa # - conda install -q -c conda-forge python-coveralls # don't install on appveyor - pip install --no-cache-dir . 
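The create/update split in the commit above works around a conda behaviour: conda env create appears to honour only a single --file specification per call, so layering the development requirements on top of the base environment takes one create followed by one update. A minimal sketch of the layering, assuming both YAML files declare the same environment name pypsa:

    conda env create -n pypsa --file=environment.yaml       # base dependencies
    conda env update -n pypsa --file=environment_dev.yaml   # test tooling on top
    conda activate pypsa

The next commit goes a step further and first creates a bare environment with only python and pip, applying both files as updates to avoid package conflicts during the initial solve.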
From 3b7c74d1b5efe642ad6e6c7144a80de46b6f3a94 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 00:34:39 +0100 Subject: [PATCH 073/111] travis.yaml: try resolving package conflicts in conda install --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 38bb9b867..ae2233b70 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,7 +21,8 @@ before_install: install: - conda config --add pinned_packages python=$PYTHON_VERSION - - conda env create -n pypsa --file=environment.yaml + - conda create -n pypsa python pip + - conda env update -n pypsa --file=environment.yaml - conda env update -n pypsa --file=environment_dev.yaml - conda activate pypsa # - conda install -q -c conda-forge python-coveralls # don't install on appveyor From b726cae83cb67d5a373fb5df5fe6ae28d49fef23 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 10:34:17 +0100 Subject: [PATCH 074/111] test: reintegrate cbc solvers --- .travis.yml | 1 + test/test_ac_dc_lopf.py | 17 +++++------------ test/test_opf_storage.py | 11 +++-------- test/test_sclopf_scigrid.py | 11 ++++++----- 4 files changed, 15 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index ae2233b70..7fcdead9e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,7 @@ matrix: - os: - windows - linux + - osx - env: - PYTHON_VERSION="3.6" - PYTHON_VERSION="3.7" diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 0a76a92a7..463841223 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -1,32 +1,26 @@ -from __future__ import print_function, division -from __future__ import absolute_import - import pypsa - from itertools import product - import os - from numpy.testing import assert_array_almost_equal as equal - import sys +solver_name = 'cbc' if sys.platform == 'win32' else 'glpk' + + def test_lopf(): - csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "ac-dc-meshed", "ac-dc-data") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", + "ac-dc-meshed", "ac-dc-data") n = pypsa.Network(csv_folder_name) n.links_t.p_set.drop(columns=n.links.index, inplace=True) - results_folder_name = os.path.join(csv_folder_name,"results-lopf") n_r = pypsa.Network(results_folder_name) - #test results were generated with GLPK; solution should be unique, #so other solvers should not differ (tested with cbc and gurobi) - solver_name = "glpk" snapshots = n.snapshots @@ -53,6 +47,5 @@ def test_lopf(): n_r.links_t.p0.loc[:,n.links.index],decimal=2) - if __name__ == "__main__": test_lopf() diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 712798f7e..198e2bd8d 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -1,16 +1,11 @@ -from __future__ import print_function, division -from __future__ import absolute_import import pypsa - import pandas as pd - import sys - import os - from numpy.testing import assert_array_almost_equal as equal +solvers = ['glpk'] if sys.platform == 'win32' else ['cbc', 'glpk'] def test_opf(pyomo=True): @@ -25,7 +20,7 @@ def test_opf(pyomo=True): target_gen_p = pd.read_csv(target_path, index_col=0) #test results were generated with GLPK and other solvers may differ - for solver_name in ["glpk"]: + for solver_name in solvers: n.lopf(solver_name=solver_name, pyomo=True) @@ -33,7 +28,7 @@ def test_opf(pyomo=True): if sys.version_info.major >= 3: - for solver_name in ["glpk"]: + for solver_name in solvers: n.lopf(solver_name=solver_name, pyomo=False, 
keep_files=True) diff --git a/test/test_sclopf_scigrid.py b/test/test_sclopf_scigrid.py index 0ef6af1af..45c2bcd2e 100644 --- a/test/test_sclopf_scigrid.py +++ b/test/test_sclopf_scigrid.py @@ -1,17 +1,18 @@ -from __future__ import print_function, division -from __future__ import absolute_import - import os import numpy as np import pypsa +import sys + +solver_name = 'cbc' if sys.platform == 'win32' else 'glpk' + def test_sclopf(): - csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "scigrid-de", "scigrid-with-load-gen-trafos") + csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", + "scigrid-de", "scigrid-with-load-gen-trafos") network = pypsa.Network(csv_folder_name) #test results were generated with GLPK and other solvers may differ - solver_name = "glpk" #There are some infeasibilities without line extensions for line_name in ["316","527","602"]: From 4dc1f5174c76506ecd64fc48c17ab4c7dcf384f7 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 10:43:38 +0100 Subject: [PATCH 075/111] travis.yaml: set full matrix of os and python versions --- .travis.yml | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7fcdead9e..644d23ae2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,16 +4,13 @@ language: bash sudo: false # Use container-based infrastructure - -matrix: - include: - - os: - - windows - - linux - - osx - - env: - - PYTHON_VERSION="3.6" - - PYTHON_VERSION="3.7" +- os: + - windows + - linux + - osx +- env: + - PYTHON_VERSION="3.6" + - PYTHON_VERSION="3.7" before_install: - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh From 5e6548de88a6071fd8f24c2e079e318c9d57f1e0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 11:43:31 +0100 Subject: [PATCH 076/111] test: correct alignment of solvers --- test/test_ac_dc_lopf.py | 2 +- test/test_sclopf_scigrid.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 463841223..b41066b77 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -4,7 +4,7 @@ from numpy.testing import assert_array_almost_equal as equal import sys -solver_name = 'cbc' if sys.platform == 'win32' else 'glpk' +solver_name = 'glpk' if sys.platform == 'win32' else 'cbc' def test_lopf(): diff --git a/test/test_sclopf_scigrid.py b/test/test_sclopf_scigrid.py index 45c2bcd2e..1eb580d7e 100644 --- a/test/test_sclopf_scigrid.py +++ b/test/test_sclopf_scigrid.py @@ -3,7 +3,7 @@ import pypsa import sys -solver_name = 'cbc' if sys.platform == 'win32' else 'glpk' +solver_name = 'glpk' if sys.platform == 'win32' else 'cbc' def test_sclopf(): From 156f25b24eb3f27d66e2d9eadcb4edd3ba18db03 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 12:10:03 +0100 Subject: [PATCH 077/111] travis: correct typo --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 644d23ae2..63a8a2b53 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,11 +4,11 @@ language: bash sudo: false # Use container-based infrastructure -- os: +os: - windows - linux - osx -- env: +env: - PYTHON_VERSION="3.6" - PYTHON_VERSION="3.7" From df0828224e3d9cf7850f6c553f4abe78053aff57 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 13:55:48 +0100 Subject: [PATCH 078/111] linopf.py: explicit closing of temp files via file descriptor (necessary for windows machines) test: set 
keep_files=False, check if windows works now --- pypsa/linopf.py | 32 +++++++++++++++++++------------- pypsa/linopt.py | 7 ++++--- test/test_ac_dc_lopf.py | 2 +- test/test_opf_storage.py | 2 +- 4 files changed, 25 insertions(+), 18 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index ec4065ed7..c883f2a0e 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -500,8 +500,12 @@ def prepare_lopf(n, snapshots=None, keep_files=False, extra_functionality=None): """ - Sets up the linear problem and writes it out to a lp file, stored at - problem_fn + Sets up the linear problem and writes it out to an lp file + + Returns + ------- + Tuple (fdp, problem_fn) indicating the file descriptor and the file name of + the lp file """ n._xCounter, n._cCounter = 0, 0 @@ -513,10 +517,10 @@ def prepare_lopf(n, snapshots=None, keep_files=False, snapshots = n.snapshots if snapshots is None else snapshots start = time.time() - objective_fn = mkstemp(prefix='pypsa-objectve-', suffix='.txt', text=True)[1] - constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True)[1] - bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True)[1] - problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True)[1] + fdo, objective_fn = mkstemp(prefix='pypsa-objective-', suffix='.txt', text=True) + fdc, constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True) + fdb, bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True) + fdp, problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True) n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') @@ -553,10 +557,11 @@ def prepare_lopf(n, snapshots=None, keep_files=False, n.bounds_f.write("end\n") - n.bounds_f.close(); del n.bounds_f - n.objective_f.close(); del n.objective_f - n.constraints_f.close(); del n.constraints_f + # explicit closing with file descriptor is necessary for windows machines + for f, fd in (('bounds_f', fdb), ('constraints_f', fdc), ('objective_f', fdo)): + getattr(n, f).close(); delattr(n, f); os.close(fd) + # concatenate files with open(problem_fn, 'wb') as wfd: for f in [objective_fn, constraints_fn, bounds_fn]: with open(f,'rb') as fd: @@ -565,7 +570,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, os.remove(f) logger.info(f'Total preparation time: {round(time.time()-start, 2)}s') - return problem_fn + return fdp, problem_fn def assign_solution(n, sns, variables_sol, constraints_dual, @@ -750,8 +755,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", clear_references(n) logger.info("Prepare linear problem") - problem_fn = prepare_lopf(n, snapshots, keep_files, extra_functionality) - solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol')[1] + fdp, problem_fn = prepare_lopf(n, snapshots, keep_files, extra_functionality) + fds, solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol') if solver_logfile is None: fdl, solver_logfile = mkstemp(prefix='pypsa-solve', suffix='.log') @@ -770,7 +775,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", return status,termination_condition if not keep_files: - os.remove(problem_fn); os.remove(solution_fn) + os.close(fdp); os.remove(problem_fn) + os.close(fds); os.remove(solution_fn) #adjust objective value for c, attr in nominal_attrs.items(): diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 2c52a7b00..b057b9791 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -388,11 +388,11 @@ def run_and_read_glpk(n, problem_fn, solution_fn, 
solver_logfile, os.system(command) - data = open(solution_fn) + f = open(solution_fn) info = '' linebreak = False while not linebreak: - line = data.readline() + line = f.readline() linebreak = line == '\n' info += line info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] @@ -403,13 +403,14 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, if termination_condition != "optimal": return status, termination_condition, None, None, None - sol = pd.read_fwf(data).set_index('Row name') + sol = pd.read_fwf(f).set_index('Row name') variables_b = sol.index.str[0] == 'x' variables_sol = sol[variables_b]['Activity'].astype(float) sol = sol[~variables_b] constraints_b = sol.index.str[0] == 'c' constraints_dual = (pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') .fillna(0)) + f.close() return (status, termination_condition, variables_sol, constraints_dual, objective) diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index b41066b77..7b5bfb7d7 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -37,7 +37,7 @@ def test_lopf(): n_r.links_t.p0.loc[:,n.links.index],decimal=4) if sys.version_info.major >= 3: - n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False, keep_files=True) + n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) equal(n.generators_t.p.loc[:,n.generators.index], n_r.generators_t.p.loc[:,n.generators.index],decimal=2) diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 198e2bd8d..c554199c8 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -30,7 +30,7 @@ def test_opf(pyomo=True): for solver_name in solvers: - n.lopf(solver_name=solver_name, pyomo=False, keep_files=True) + n.lopf(solver_name=solver_name, pyomo=False) equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, decimal=2) From 67be12b110661533684888dc4b0a19672a667c82 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 14:52:37 +0100 Subject: [PATCH 079/111] linopf.py: fix sign for marginal price --- pypsa/linopf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index c883f2a0e..5666d5b85 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -621,7 +621,7 @@ def map_solution(c, attr): #duals def map_dual(c, attr, pnl): - sign = 1 if 'upper' in attr else -1 + sign = 1 if ('upper' in attr or attr == 'marginal_price') else -1 if pnl: set_from_frame(c, attr, get_con(n, c, attr, pop=pop).stack() .map(sign * constraints_dual).unstack()) From 841f74c19ec76a1a12bc7e888d3ec8a250e1422e Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 15:43:52 +0100 Subject: [PATCH 080/111] components.py: sort args of lopf linopf.py: correct sign for shadowprices --- pypsa/components.py | 17 ++++++++--------- pypsa/linopf.py | 6 ++++-- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pypsa/components.py b/pypsa/components.py index 57f82f643..526f04d54 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -402,10 +402,9 @@ def set_snapshots(self,snapshots): #NB: No need to rebind pnl to self, since haven't changed it - def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, - solver_logfile=None, solver_options={}, keep_files=False, - formulation="kirchhoff", extra_postprocessing=None, pyomo=True, - **kwargs): + def lopf(self, snapshots=None, pyomo=True, solver_name="glpk", + solver_options={}, solver_logfile=None, formulation="kirchhoff", + keep_files=False, extra_functionality=None, 
**kwargs): """ Linear optimal power flow for a group of snapshots. @@ -414,17 +413,17 @@ def lopf(self, snapshots=None, solver_name="glpk", extra_functionality=None, snapshots : list or index slice A list of snapshots to optimise, must be a subset of network.snapshots, defaults to network.snapshots - solver_name : string - Must be a solver name that pyomo recognises and that is - installed, e.g. "glpk", "gurobi" pyomo : bool, default True Whether to use pyomo for building and solving the model, setting this to False saves a lot of memory and time. - solver_logfile : None|string - If not None, sets the logfile option of the solver. + solver_name : string + Must be a solver name that pyomo recognises and that is + installed, e.g. "glpk", "gurobi" solver_options : dictionary A dictionary with additional options that get passed to the solver. (e.g. {'threads':2} tells gurobi to use only 2 cpus) + solver_logfile : None|string + If not None, sets the logfile option of the solver. keep_files : bool, default False Keep the files that pyomo constructs from OPF problem construction, e.g. .lp file - useful for debugging diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 5666d5b85..4278ad0c5 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -581,7 +581,9 @@ def assign_solution(n, sns, variables_sol, constraints_dual, """ def set_from_frame(c, attr, df): - if n.pnl(c)[attr].empty: + if attr not in n.pnl(c): #use this for subnetworks_t + n.pnl(c)[attr] = df.reindex(n.snapshots) + elif n.pnl(c)[attr].empty: n.pnl(c)[attr] = df.reindex(n.snapshots) else: n.pnl(c)[attr].loc[sns, :] = df.reindex(columns=n.pnl(c)[attr].columns) @@ -621,7 +623,7 @@ def map_solution(c, attr): #duals def map_dual(c, attr, pnl): - sign = 1 if ('upper' in attr or attr == 'marginal_price') else -1 + sign = -1 if 'lower' in attr else 1 if pnl: set_from_frame(c, attr, get_con(n, c, attr, pop=pop).stack() .map(sign * constraints_dual).unstack()) From a553d68364667e5af2df541b3a85ed395a3162e9 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 13 Nov 2019 17:07:33 +0100 Subject: [PATCH 081/111] linopt.py: ensure integer length --- pypsa/linopt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index b057b9791..9c5ebb77d 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -38,7 +38,7 @@ def write_bound(n, lower, upper, axes=None): else: shape = tuple(map(len, axes)) ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = np.prod(shape) + length = int(np.prod(shape)) n._xCounter += length variables = np.array([f'x{x}' for x in range(n._xCounter - length, n._xCounter)], dtype=object).reshape(shape) @@ -62,7 +62,7 @@ def write_constraint(n, lhs, sense, rhs, axes=None): else: shape = tuple(map(len, axes)) ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = np.prod(shape) + length = int(np.prod(shape)) n._cCounter += length cons = np.array([f'c{x}' for x in range(n._cCounter - length, n._cCounter)], dtype=object).reshape(shape) From e8be54ba0be189ea38f07e2003cc74994f2f1bdc Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 14 Nov 2019 11:39:29 +0100 Subject: [PATCH 082/111] linopf.py: add objective constant to objective function (no post-processing correction) --- pypsa/linopf.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 4278ad0c5..1149bb294 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -479,6 +479,12 @@ def define_objective(n, sns): Defines and writes out the 
objective function """ + # constant term: capital cost of already-built capacity + nom_attr = nominal_attrs.items() + constant = sum(n.df(c)[attr] @ n.df(c).capital_cost for c, attr in nom_attr) + object_const = write_bound(n, constant, constant) + n.objective_f.write(linexpr((1, object_const))[0]) + for c, attr in lookup.query('marginal_cost').index: cost = (get_as_dense(n, c, 'marginal_cost', sns) .loc[:, lambda ds: (ds != 0).all()] @@ -774,15 +780,12 @@ def network_lopf(n, snapshots=None, solver_name="cbc", status, termination_condition, variables_sol, constraints_dual, obj = res if termination_condition != "optimal": - return status,termination_condition + return status, termination_condition if not keep_files: os.close(fdp); os.remove(problem_fn) os.close(fds); os.remove(solution_fn) - #adjust objective value - for c, attr in nominal_attrs.items(): - obj -= n.df(c)[attr] @ n.df(c).capital_cost n.objective = obj assign_solution(n, snapshots, variables_sol, constraints_dual, keep_references=keep_references, From 3fd1a83c9a2cd2407bde457c28e518317b0ad921 Mon Sep 17 00:00:00 2001 From: Fabian Date: Sat, 16 Nov 2019 01:47:53 +0100 Subject: [PATCH 083/111] linopf/linopt: detach variables and constraints from components --- pypsa/linopf.py | 121 ++++++++++++++++++++++++++++++------------------ pypsa/linopt.py | 46 ++++++++---------- 2 files changed, 95 insertions(+), 72 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 1149bb294..acd1913dc 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -21,7 +21,7 @@ from .pf import (_as_snapshots, get_switchable_as_dense as get_as_dense) from .descriptors import (get_bounds_pu, get_extendable_i, get_non_extendable_i, - expand_series, nominal_attrs, additional_linkports) + expand_series, nominal_attrs, additional_linkports, Dict) from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, join_exprs, run_and_read_cbc, @@ -515,6 +515,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, """ n._xCounter, n._cCounter = 0, 0 + n.vars, n.cons = Dict(), Dict() cols = ['component', 'name', 'pnl', 'specification'] n.variables = pd.DataFrame(columns=cols).set_index(cols[:2]) @@ -586,72 +587,102 @@ def assign_solution(n, sns, variables_sol, constraints_dual, network. 
""" - def set_from_frame(c, attr, df): - if attr not in n.pnl(c): #use this for subnetworks_t - n.pnl(c)[attr] = df.reindex(n.snapshots) - elif n.pnl(c)[attr].empty: - n.pnl(c)[attr] = df.reindex(n.snapshots) + + def set_from_frame(pnl, attr, df): + if attr not in pnl: #use this for subnetworks_t + pnl[attr] = df.reindex(n.snapshots) + elif pnl[attr].empty: + pnl[attr] = df.reindex(n.snapshots) else: - n.pnl(c)[attr].loc[sns, :] = df.reindex(columns=n.pnl(c)[attr].columns) + pnl[attr].loc[sns, :] = df.reindex(columns=pnl[attr].columns) pop = not keep_references - #solutions, if nominal capcity was no variable set optimal value to nominal def map_solution(c, attr): - if (c, attr) in n.variables.index: - variables = get_var(n, c, attr, pop=pop) - pnl = isinstance(variables, pd.DataFrame) - if pnl: - values = variables.stack().map(variables_sol).unstack() - if c in n.passive_branch_components: - set_from_frame(c, 'p0', values) - set_from_frame(c, 'p1', - values) - elif c == 'Link': - set_from_frame(c, 'p0', values) - for i in ['1'] + additional_linkports(n): - i_eff = '' if i == '1' else i - eff = get_as_dense(n, 'Link', f'efficiency{i_eff}', sns) - set_from_frame(c, f'p{i}', - values * eff) - - else: - set_from_frame(c, attr, values) + variables = get_var(n, c, attr, pop=pop) + predefined = True + if (c, attr) not in lookup.index: + predefined = False + n.sols[c] = n.sols[c] if c in n.sols else Dict(df=pd.DataFrame(), pnl={}) + + if isinstance(variables, pd.DataFrame): + # case that variables are timedependent + pnl = n.pnl(c) if predefined else n.sols[c].pnl + values = variables.stack().map(variables_sol).unstack() + if c in n.passive_branch_components: + set_from_frame(pnl, 'p0', values) + set_from_frame(pnl, 'p1', - values) + elif c == 'Link': + set_from_frame(pnl, 'p0', values) + for i in ['1'] + additional_linkports(n): + i_eff = '' if i == '1' else i + eff = get_as_dense(n, 'Link', f'efficiency{i_eff}', sns) + set_from_frame(pnl, f'p{i}', - values * eff) + else: + set_from_frame(pnl, attr, values) + else: + # case that variables are static + if predefined: + n.df(c)[attr + 'opt'] = variables.map(variables_sol)\ + .fillna(n.df(c)[attr]) else: - n.df(c)[attr+'_opt'] = variables.map(variables_sol)\ - .fillna(n.df(c)[attr]) - elif lookup.at[(c, attr), 'nominal']: - n.df(c)[attr+'_opt'] = n.df(c)[attr] + n.sols[c].df[attr] = variables.map(variables_sol) - for c, attr in lookup.index: + n.sols = Dict() + for c, attr in n.variables.index.intersection(lookup.index): map_solution(c, attr) + # if nominal capcity was no variable set optimal value to nominal + for c, attr in lookup.query('nominal').index.difference(n.variables.index): + n.df(c)[attr+'_opt'] = n.df(c)[attr] + + # recalculate storageunit net dispatch if not n.df('StorageUnit').empty: c = 'StorageUnit' n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store'] #duals - def map_dual(c, attr, pnl): - sign = -1 if 'lower' in attr else 1 - if pnl: - set_from_frame(c, attr, get_con(n, c, attr, pop=pop).stack() - .map(sign * constraints_dual).unstack()) - else: - n.df(c)[attr] = get_con(n, c, attr, pop=pop).map(sign* constraints_dual) - if keep_shadowprices == False: keep_shadowprices = [] elif keep_shadowprices is None: keep_shadowprices = ['Bus', 'Line', 'GlobalConstraint'] - for (c, attr), pnl in n.constraints.pnl.items(): - if keep_shadowprices == True: - map_dual(c, attr, pnl) - elif c in keep_shadowprices: - map_dual(c, attr, pnl) + sp = n.constraints.index + if isinstance(keep_shadowprices, list): + sp = 
sp[sp.isin(keep_shadowprices, level=0)] + + def map_dual(c, attr, predefined=True): + constraints = get_con(n, c, attr, pop=pop) + predefined = True + if c not in n.all_components: + predefined = False + n.duals[c] = n.duals[c] if c in n.duals else Dict(df=pd.DataFrame(), pnl={}) + sign = -1 if 'lower' in attr else 1 + if isinstance(constraints, pd.DataFrame): + # case that constraints are time-dependent + pnl = n.pnl(c) if predefined else n.duals[c].pnl + set_from_frame(pnl, attr, constraints.stack().map(sign * + constraints_dual).unstack()) else: + # case that constraints are static + if predefined: + n.df(c)[attr] = constraints.map(constraints_dual).fillna(n.df(c)[attr]) + else: + n.duals[c].df[attr] = constraints.map(constraints_dual) + + n.duals = Dict() + # extract shadow prices attached to components + for c, attr in sp: + map_dual(c, attr) + + # discard remaining if wanted + if not keep_references: + for c, attr in n.constraints.index.difference(sp): + get_con(n, c, attr, pop) #load if len(n.loads): - set_from_frame('Load', 'p', get_as_dense(n, 'Load', 'p_set', sns)) + set_from_frame(n.pnl('Load'), 'p', get_as_dense(n, 'Load', 'p_set', sns)) # recalculate injection ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 9c5ebb77d..50d885685 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -13,6 +13,7 @@ pyomo (see module linopt.py) """ from .descriptors import Dict import pandas as pd import os, logging, re, io, subprocess import numpy as np @@ -193,17 +194,17 @@ def join_exprs(df): # references to vars and cons, rewrite this part to not store every reference # ============================================================================= -def _add_reference(n, df, c, attr, suffix, pnl=True): - attr_name = attr + suffix +def _add_reference(ref_dict, df, attr, pnl=True): if pnl: - if attr_name in n.pnl(c): - n.pnl(c)[attr_name][df.columns] = df + if attr in ref_dict.pnl: + ref_dict.pnl[attr][df.columns] = df else: - n.pnl(c)[attr_name] = df - if n.pnl(c)[attr_name].shape[1] == n.df(c).shape[0]: - n.pnl(c)[attr_name] = n.pnl(c)[attr_name].reindex(columns=n.df(c).index) + ref_dict.pnl[attr] = df else: - n.df(c).loc[df.index, attr_name] = df + if ref_dict.df.empty: + ref_dict.df[attr] = df + else: + ref_dict.df.loc[df.index, attr] = df def set_varref(n, variables, c, attr, pnl=True, spec=''): """ @@ -215,11 +216,13 @@ def set_varref(n, variables, c, attr, pnl=True, spec=''): dict of time-depending quantities, e.g. network.generators_t . """ if not variables.empty: + if c not in n.variables.index: + n.vars[c] = Dict(df=pd.DataFrame(), pnl=Dict()) if ((c, attr) in n.variables.index) and (spec != ''): n.variables.at[idx[c, attr], 'specification'] += ', ' + spec else: n.variables.loc[idx[c, attr], :] = [pnl, spec] - _add_reference(n, variables, c, attr, var_ref_suffix, pnl=pnl) + _add_reference(n.vars[c], variables, attr, pnl=pnl) def set_conref(n, constraints, c, attr, pnl=True, spec=''): """ @@ -231,12 +234,13 @@ def set_conref(n, constraints, c, attr, pnl=True, spec=''): dict of time-depending quantities, e.g. network.generators_t . 
""" if not constraints.empty: + if c not in n.constraints.index: + n.cons[c] = Dict(df=pd.DataFrame(), pnl=Dict()) if ((c, attr) in n.constraints.index) and (spec != ''): n.constraints.at[idx[c, attr], 'specification'] += ', ' + spec else: n.constraints.loc[idx[c, attr], :] = [pnl, spec] - _add_reference(n, constraints, c, attr, con_ref_suffix, pnl=pnl) - + _add_reference(n.cons[c], constraints, attr, pnl=pnl) def get_var(n, c, attr, pop=False): ''' @@ -257,14 +261,8 @@ def get_var(n, c, attr, pop=False): >>> get_var(n, 'Generator', 'p') ''' - if n.variables.at[idx[c, attr], 'pnl']: - if pop: - return n.pnl(c).pop(attr + var_ref_suffix) - return n.pnl(c)[attr + var_ref_suffix] - else: - if pop: - return n.df(c).pop(attr + var_ref_suffix) - return n.df(c)[attr + var_ref_suffix] + vvars = n.vars[c].pnl if n.variables.pnl[c, attr] else n.vars[c].df + return vvars.pop(attr) if pop else vvars[attr] def get_con(n, c, attr, pop=False): @@ -284,14 +282,8 @@ def get_con(n, c, attr, pop=False): ------- get_con(n, 'Generator', 'mu_upper') """ - if n.constraints.at[idx[c, attr], 'pnl']: - if pop: - return n.pnl(c).pop(attr + con_ref_suffix) - return n.pnl(c)[attr + con_ref_suffix] - else: - if pop: - return n.df(c).pop(attr + con_ref_suffix) - return n.df(c)[attr + con_ref_suffix] + cons = n.cons[c].pnl if n.constraints.pnl[c, attr] else n.cons[c].df + return cons.pop(attr) if pop else cons[attr] def clear_references(n): From d0f096fa3804e3be10896aee3c5df82259cb171a Mon Sep 17 00:00:00 2001 From: Fabian Date: Sat, 16 Nov 2019 12:40:36 +0100 Subject: [PATCH 084/111] linopt: - fix write cons and vars for one single element - add front user functions define_variables, define_constraints - remove pnl argument as unnecessary linopf: - remove pnl arguments --- pypsa/linopf.py | 30 ++++++++++++++++------------ pypsa/linopt.py | 52 +++++++++++++++++++++++++++++-------------------- 2 files changed, 49 insertions(+), 33 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index acd1913dc..e57f35520 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -26,13 +26,13 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, join_exprs, run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, - clear_references) + clear_references, define_constraints, define_variables) import pandas as pd import numpy as np -import gc, string, random, time, os, re, shutil +import gc, time, os, re, shutil from tempfile import mkstemp import logging @@ -60,7 +60,7 @@ def define_nominal_for_extendable_variables(n, c, attr): lower = n.df(c)[attr+'_min'][ext_i] upper = n.df(c)[attr+'_max'][ext_i] variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr, pnl=False) + set_varref(n, variables, c, attr) def define_dispatch_for_extendable_variables(n, sns, c, attr): @@ -79,8 +79,8 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): """ ext_i = get_extendable_i(n, c) if ext_i.empty: return - variables = write_bound(n, -np.inf, np.inf, axes=[sns, ext_i]) - set_varref(n, variables, c, attr, pnl=True, spec='extendables') + define_variables(n, -np.inf, np.inf, c, attr, axes=[sns, ext_i], + spec='extendables') def define_dispatch_for_non_extendable_variables(n, sns, c, attr): @@ -104,7 +104,7 @@ def define_dispatch_for_non_extendable_variables(n, sns, c, attr): lower = min_pu.mul(nominal_fix) upper = max_pu.mul(nominal_fix) variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr, pnl=True, spec='nonextendables') + set_varref(n, variables, 
c, attr, spec='nonextendables') def define_dispatch_for_extendable_constraints(n, sns, c, attr): @@ -131,12 +131,12 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr): lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v), return_axes=True) constraints = write_constraint(n, lhs, '>=', rhs, axes) - set_conref(n, constraints, c, 'mu_upper', pnl=True, spec=attr) + set_conref(n, constraints, c, 'mu_upper', spec=attr) lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v), return_axes=True) constraints = write_constraint(n, lhs, '<=', rhs, axes) - set_conref(n, constraints, c, 'mu_lower', pnl=True, spec=attr) + set_conref(n, constraints, c, 'mu_lower', spec=attr) def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): @@ -169,7 +169,7 @@ def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): if fix.empty: return lhs = linexpr((1, get_var(n, c, attr)[fix.index])) constraints = write_constraint(n, lhs, '=', fix) - set_conref(n, constraints, c, f'mu_{attr}_set', pnl) + set_conref(n, constraints, c, f'mu_{attr}_set') def define_ramp_limit_constraints(n, sns): @@ -433,7 +433,7 @@ def define_global_constraints(n, sns): con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + set_conref(n, con, 'GlobalConstraint', 'mu', name) # for the next two to we need a line carrier if len(n.global_constraints) > len(glcs): @@ -454,7 +454,7 @@ def define_global_constraints(n, sns): sense = glc.sense rhs = glc.constant con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + set_conref(n, con, 'GlobalConstraint', 'mu', name) #expansion cost limits glcs = n.global_constraints.query('type == ' @@ -471,7 +471,7 @@ def define_global_constraints(n, sns): sense = glc.sense rhs = glc.constant con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) - set_conref(n, con, 'GlobalConstraint', 'mu', False, name) + set_conref(n, con, 'GlobalConstraint', 'mu', name) def define_objective(n, sns): @@ -684,6 +684,12 @@ def map_dual(c, attr, predefined=True): if len(n.loads): set_from_frame(n.pnl('Load'), 'p', get_as_dense(n, 'Load', 'p_set', sns)) + #clean up vars and cons + for c in list(n.vars): + if n.vars[c].df.empty and n.vars[c].pnl == {}: n.vars.pop(c) + for c in list(n.cons): + if n.cons[c].df.empty and n.cons[c].pnl == {}: n.cons.pop(c) + # recalculate injection ca = [('Generator', 'p', 'bus' ), ('Store', 'p', 'bus'), ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'), diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 50d885685..5c825b98b 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -21,10 +21,34 @@ logger = logging.getLogger(__name__) +# ============================================================================= +# Front end function +# ============================================================================= + +def define_variables(n, lower, upper, name, attr='', axes=None, spec=''): + var = write_bound(n, lower, upper, axes) + set_varref(n, var, name, attr, spec=spec) + + +def define_constraints(n, lhs, sense, rhs, name, attr='', axes=None, spec=''): + con = write_constraint(n, lhs, sense, rhs, axes) + set_conref(n, con, name, attr, spec=spec) + # ============================================================================= # writing functions # ============================================================================= +def _get_handlers(axes, *maybearrays): + axes = [axes] if 
isinstance(axes, pd.Index) else axes + if axes is None: + axes, shape = broadcasted_axes(*maybearrays) + else: + shape = tuple(map(len, axes)) + length = np.prod(shape) + ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series + return axes, shape, length, ser_or_frame + + def write_bound(n, lower, upper, axes=None): """ Writer function for writing out mutliple variables at a time. If lower and @@ -33,19 +57,11 @@ def write_bound(n, lower, upper, axes=None): Return a series or frame with variable references. """ - axes = [axes] if isinstance(axes, pd.Index) else axes - if axes is None: - axes, shape = broadcasted_axes(lower, upper) - else: - shape = tuple(map(len, axes)) - ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = int(np.prod(shape)) + axes, shape, length, ser_or_frame = _get_handlers(axes, lower, upper) n._xCounter += length variables = np.array([f'x{x}' for x in range(n._xCounter - length, n._xCounter)], dtype=object).reshape(shape) lower, upper = _str_array(lower), _str_array(upper) -# for s in (lower + ' <= '+ variables + ' <= '+ upper + '\n').flatten(): -# n.bounds_f.write(s) n.bounds_f.write(join_exprs(lower + ' <= '+ variables + ' <= '+ upper + '\n')) return ser_or_frame(variables, *axes) @@ -57,21 +73,13 @@ def write_constraint(n, lhs, sense, rhs, axes=None): Return a series or frame with constraint references. """ - axes = [axes] if isinstance(axes, pd.Index) else axes - if axes is None: - axes, shape = broadcasted_axes(lhs, rhs) - else: - shape = tuple(map(len, axes)) - ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - length = int(np.prod(shape)) + axes, shape, length, ser_or_frame = _get_handlers(axes, lhs, sense, rhs) n._cCounter += length cons = np.array([f'c{x}' for x in range(n._cCounter - length, n._cCounter)], dtype=object).reshape(shape) if isinstance(sense, str): sense = '=' if sense == '==' else sense lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) -# for c in (cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n').flatten(): -# n.constraints_f.write(c) n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n')) return ser_or_frame(cons, *axes) @@ -94,7 +102,7 @@ def broadcasted_axes(*dfs): """ axes = [] - shape = () + shape = (1,) if set(map(type, dfs)) == {tuple}: dfs = sum(dfs, ()) @@ -206,7 +214,7 @@ def _add_reference(ref_dict, df, attr, pnl=True): else: ref_dict.df.loc[df.index, attr] = df -def set_varref(n, variables, c, attr, pnl=True, spec=''): +def set_varref(n, variables, c, attr, spec=''): """ Sets variable references to the network. If pnl is False it stores a series of variable names in the static @@ -216,6 +224,7 @@ def set_varref(n, variables, c, attr, pnl=True, spec=''): dict of time-depending quantities, e.g. network.generators_t . """ if not variables.empty: + pnl = variables.ndim == 2 if c not in n.variables.index: n.vars[c] = Dict(df=pd.DataFrame(), pnl=Dict()) if ((c, attr) in n.variables.index) and (spec != ''): @@ -224,7 +233,7 @@ def set_varref(n, variables, c, attr, pnl=True, spec=''): n.variables.loc[idx[c, attr], :] = [pnl, spec] _add_reference(n.vars[c], variables, attr, pnl=pnl) -def set_conref(n, constraints, c, attr, pnl=True, spec=''): +def set_conref(n, constraints, c, attr, spec=''): """ Sets constraint references to the network. If pnl is False it stores a series of constraints names in the static @@ -234,6 +243,7 @@ def set_conref(n, constraints, c, attr, pnl=True, spec=''): dict of time-depending quantities, e.g. network.generators_t . 
""" if not constraints.empty: + pnl = constraints.ndim == 2 if c not in n.constraints.index: n.cons[c] = Dict(df=pd.DataFrame(), pnl=Dict()) if ((c, attr) in n.constraints.index) and (spec != ''): From 41a47625d53472652bed896af2d73c445d0c43e0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Sat, 16 Nov 2019 12:53:43 +0100 Subject: [PATCH 085/111] linopf: apply define_variables and define_constraints in code --- pypsa/linopf.py | 34 +++++++++++----------------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index e57f35520..f813adef9 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -59,8 +59,7 @@ def define_nominal_for_extendable_variables(n, c, attr): if ext_i.empty: return lower = n.df(c)[attr+'_min'][ext_i] upper = n.df(c)[attr+'_max'][ext_i] - variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr) + define_variables(n, lower, upper, c, attr) def define_dispatch_for_extendable_variables(n, sns, c, attr): @@ -79,8 +78,7 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): """ ext_i = get_extendable_i(n, c) if ext_i.empty: return - define_variables(n, -np.inf, np.inf, c, attr, axes=[sns, ext_i], - spec='extendables') + define_variables(n, -np.inf, np.inf, c, attr, axes=[sns, ext_i], spec='extendables') def define_dispatch_for_non_extendable_variables(n, sns, c, attr): @@ -103,8 +101,7 @@ def define_dispatch_for_non_extendable_variables(n, sns, c, attr): min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) lower = min_pu.mul(nominal_fix) upper = max_pu.mul(nominal_fix) - variables = write_bound(n, lower, upper) - set_varref(n, variables, c, attr, spec='nonextendables') + define_variables(n, lower, upper, c, attr, spec='nonextendables') def define_dispatch_for_extendable_constraints(n, sns, c, attr): @@ -135,8 +132,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr): lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v), return_axes=True) - constraints = write_constraint(n, lhs, '<=', rhs, axes) - set_conref(n, constraints, c, 'mu_lower', spec=attr) + define_constraints(n, lhs, '<=', rhs, c, 'mu_lower', axes=axes, spec=attr) def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): @@ -189,8 +185,7 @@ def define_ramp_limit_constraints(n, sns): gens_i = rup_i & get_non_extendable_i(n, c) lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True) rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') - constraints = write_constraint(n, lhs, '<=', rhs) - set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='nonextendables') + define_constraints(n, lhs, '<=', rhs, c, 'mu_ramp_limit_up', spec='nonext.') #ext up gens_i = rup_i & get_extendable_i(n, c) @@ -198,15 +193,13 @@ def define_ramp_limit_constraints(n, sns): p_nom = get_var(n, c, 'p_nom')[gens_i] lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom), as_pandas=True) - constraints = write_constraint(n, lhs, '<=', 0) - set_conref(n, constraints, c, 'mu_ramp_limit_up', spec='extendables') + define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='ext.') #fix down gens_i = rdown_i & get_non_extendable_i(n, c) lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True) rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') - constraints = write_constraint(n, lhs, '>=', rhs) - set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='nonextendables') + define_constraints(n, lhs, '>=', rhs, c, 'mu_ramp_limit_down', spec='nonext.') #ext down gens_i = rdown_i & 
get_extendable_i(n, c) @@ -214,8 +207,7 @@ def define_ramp_limit_constraints(n, sns): limit_pu = n.df(c)['ramp_limit_down'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (limit_pu, p_nom), as_pandas=True) - constraints = write_constraint(n, lhs, '>=', 0) - set_conref(n, constraints, c, 'mu_ramp_limit_down', spec='extendables') + define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='ext.') def define_nodal_balance_constraints(n, sns): @@ -255,8 +247,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1): rhs = ((- get_as_dense(n, 'Load', 'p_set', sns) * n.loads.sign) .groupby(n.loads.bus, axis=1).sum() .reindex(columns=n.buses.index, fill_value=0)) - constraints = write_constraint(n, lhs, sense, rhs) - set_conref(n, constraints, 'Bus', 'marginal_price') + define_constraints(n, lhs, sense, rhs, 'Bus', 'marginal_price') def define_kirchhoff_constraints(n, sns): @@ -336,8 +327,7 @@ def masked_term(coeff, var, cols): rhs = -get_as_dense(n, c, 'inflow', sns).mul(eh) rhs.loc[sns[0], noncyclic_i] -= n.df(c).state_of_charge_initial[noncyclic_i] - constraints = write_constraint(n, lhs, '==', rhs) - set_conref(n, constraints, c, 'mu_state_of_charge') + define_constraints(n, lhs, '==', rhs, c, 'mu_state_of_charge') def define_store_constraints(n, sns): @@ -376,8 +366,7 @@ def masked_term(coeff, var, cols): rhs = pd.DataFrame(0, sns, stores_i) rhs.loc[sns[0], noncyclic_i] -= n.df(c)['e_initial'][noncyclic_i] - constraints = write_constraint(n, lhs, '==', rhs) - set_conref(n, constraints, c, 'mu_state_of_charge') + define_constraints(n, lhs, '==', rhs, c, 'mu_state_of_charge') def define_global_constraints(n, sns): @@ -431,7 +420,6 @@ def define_global_constraints(n, sns): lhs = lhs + '\n' + join_exprs(vals) rhs -= stores.carrier.map(emissions) @ stores.e_initial - con = write_constraint(n, lhs, glc.sense, rhs, axes=pd.Index([name])) set_conref(n, con, 'GlobalConstraint', 'mu', name) From 882c0306945d9be7212d54976a29c21bd730b691 Mon Sep 17 00:00:00 2001 From: Fabian Date: Sat, 16 Nov 2019 16:02:37 +0100 Subject: [PATCH 086/111] linopt: add align_with_static_component function --- pypsa/linopf.py | 4 +++- pypsa/linopt.py | 15 ++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index f813adef9..d483dcd25 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -26,7 +26,8 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, join_exprs, run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, - clear_references, define_constraints, define_variables) + clear_references, define_constraints, define_variables, + align_with_static_component) import pandas as pd @@ -532,6 +533,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, for c, attr in lookup.query('not nominal and not handle_separately').index: define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) define_dispatch_for_extendable_variables(n, snapshots, c, attr) + align_with_static_component(n, c, attr) define_dispatch_for_extendable_constraints(n, snapshots, c, attr) # define_fixed_variable_constraints(n, snapshots, c, attr) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 5c825b98b..554cf23e9 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -111,11 +111,24 @@ def broadcasted_axes(*dfs): if isinstance(df, (pd.Series, pd.DataFrame)): if len(axes): assert (axes[-1] == df.axes[-1]).all(), ('Series or DataFrames ' - 'are not aligned. 
Please make sure that all indexes and ' + 'columns of Series and DataFrames going into the linear ' + 'expression are equally sorted.') axes = df.axes if len(df.axes) > len(axes) else axes shape = tuple(map(len, axes)) return axes, shape +def align_with_static_component(n, c, attr): + """ + Alignment of time-dependent variables with static components. If c is a + pypsa.component name, it will sort the columns of the variable according + to the static component. + """ + if c in n.all_components and (c, attr) in n.variables.index: + if not n.variables.pnl[c, attr]: return + if len(n.vars[c].pnl[attr].columns) != len(n.df(c).index): return + n.vars[c].pnl[attr] = n.vars[c].pnl[attr].reindex(columns=n.df(c).index) + def linexpr(*tuples, as_pandas=False, return_axes=False): """ From 4f4769d4e1c6a14d9dace1cf54e0bc71efbc4546 Mon Sep 17 00:00:00 2001 From: Fabian Date: Sun, 17 Nov 2019 15:41:04 +0100 Subject: [PATCH 087/111] linopt: resolve case for defining variables and constraints with pure numpy arrays and no axes being passed. --- pypsa/linopf.py | 6 ++--- pypsa/linopt.py | 70 +++++++++++++++++++++++-------------------------- 2 files changed, 35 insertions(+), 41 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index d483dcd25..4541584a8 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -25,9 +25,8 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, join_exprs, run_and_read_cbc, - run_and_read_gurobi, run_and_read_glpk, - clear_references, define_constraints, define_variables, - align_with_static_component) + run_and_read_gurobi, run_and_read_glpk, define_constraints, + define_variables, align_with_static_component) import pandas as pd @@ -787,7 +786,6 @@ def network_lopf(n, snapshots=None, solver_name="cbc", snapshots = _as_snapshots(n, snapshots) n.calculate_dependent_values() n.determine_network_topology() - clear_references(n) logger.info("Prepare linear problem") fdp, problem_fn = prepare_lopf(n, snapshots, keep_files, extra_functionality) solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol')[1] diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 554cf23e9..ea1cf7514 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -45,8 +45,7 @@ def _get_handlers(axes, *maybearrays): else: shape = tuple(map(len, axes)) length = np.prod(shape) - ser_or_frame = pd.DataFrame if len(shape) > 1 else pd.Series - return axes, shape, length, ser_or_frame + return axes, shape, length def write_bound(n, lower, upper, axes=None): @@ -57,13 +56,13 @@ def write_bound(n, lower, upper, axes=None): Return a series or frame with variable references. """ - axes, shape, length, ser_or_frame = _get_handlers(axes, lower, upper) + axes, shape, length = _get_handlers(axes, lower, upper) n._xCounter += length variables = np.array([f'x{x}' for x in range(n._xCounter - length, n._xCounter)], dtype=object).reshape(shape) lower, upper = _str_array(lower), _str_array(upper) n.bounds_f.write(join_exprs(lower + ' <= '+ variables + ' <= '+ upper + '\n')) - return ser_or_frame(variables, *axes) + return to_pandas(variables, *axes) def write_constraint(n, lhs, sense, rhs, axes=None): """ Return a series or frame with constraint references. 
""" - axes, shape, length, ser_or_frame = _get_handlers(axes, lhs, sense, rhs) + axes, shape, length = _get_handlers(axes, lhs, sense, rhs) n._cCounter += length cons = np.array([f'c{x}' for x in range(n._cCounter - length, n._cCounter)], dtype=object).reshape(shape) @@ -81,16 +80,13 @@ def write_constraint(n, lhs, sense, rhs, axes=None): sense = '=' if sense == '==' else sense lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n')) - return ser_or_frame(cons, *axes) + return to_pandas(cons, *axes) # ============================================================================= # helpers, helper functions # ============================================================================= -var_ref_suffix = '_varref' # after solving replace with '_opt' -con_ref_suffix = '_conref' # after solving replace with '' - def broadcasted_axes(*dfs): """ Helper function which, from a collection of arrays, series, frames and other @@ -108,6 +104,7 @@ def broadcasted_axes(*dfs): dfs = sum(dfs, ()) for df in dfs: + shape = max(shape, np.asarray(df).shape) if isinstance(df, (pd.Series, pd.DataFrame)): if len(axes): assert (axes[-1] == df.axes[-1]).all(), ('Series or DataFrames ' @@ -115,9 +112,9 @@ def broadcasted_axes(*dfs): 'columns of Series and DataFrames going into the linear ' 'expression are equally sorted.') axes = df.axes if len(df.axes) > len(axes) else axes - shape = tuple(map(len, axes)) return axes, shape + def align_with_static_component(n, c, attr): """ Alignment of time-dependent variables with static components. If c is a @@ -180,15 +177,22 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object) if np.prod(shape): for coeff, var in tuples: - expr += _str_array(coeff) + _str_array(var) + '\n' + expr = expr + _str_array(coeff) + _str_array(var) + '\n' if as_pandas: - twodims = len(shape) > 1 - return pd.DataFrame(expr, *axes) if twodims else pd.Series(expr, *axes) + return to_pandas(expr, *axes) if return_axes: return (expr, *axes) return expr +def to_pandas(array, *axes): + """ + Convert a numpy array to pandas.Series if 1-dimensional or to a + pandas.DataFrame if 2-dimensional. Provide index and columns if needed + """ + return pd.Series(array, *axes) if array.ndim == 1 else pd.DataFrame(array, *axes) + + def _str_array(array): if isinstance(array, (float, int)): array = f'+{float(array)} ' if array >= 0 else f'{float(array)} ' @@ -196,7 +200,7 @@ def _str_array(array): array = array.values if isinstance(array, np.ndarray): if not (array.dtype == object) and array.size: - signs = pd.Series(array) if array.ndim == 1 else pd.DataFrame(array) + signs = to_pandas(array) signs = (signs.pipe(np.sign) .replace([0, 1, -1], ['+', '+', '-']).values) array = signs + abs(array).astype(str) + ' ' @@ -230,11 +234,13 @@ def _add_reference(ref_dict, df, attr, pnl=True): def set_varref(n, variables, c, attr, spec=''): """ Sets variable references to the network. - If pnl is False it stores a series of variable names in the static - dataframe of the given component. The columns name is then given by the - attribute name attr and the globally define var_ref_suffix. - If pnl is True if stores the given frame of references in the component - dict of time-depending quantities, e.g. network.generators_t . 
+    One-dimensional variable references will be collected at n.vars[c].df,
+    two-dimensional variables in n.vars[c].pnl.
+    For example:
+        * nominal capacity variables for generators are stored in
+          `n.vars.Generator.df.p_nom`
+        * operational variables for generators are stored in
+          `n.vars.Generator.pnl.p`
     """
     if not variables.empty:
         pnl = variables.ndim == 2
@@ -248,12 +254,14 @@ def set_conref(n, constraints, c, attr, spec=''):
     """
     Sets constraint references to the network.
-    If pnl is False it stores a series of constraints names in the static
-    dataframe of the given component. The columns name is then given by the
-    attribute name attr and the globally define con_ref_suffix.
-    If pnl is True if stores the given frame of references in the component
-    dict of time-depending quantities, e.g. network.generators_t .
+    One-dimensional constraint references will be collected at n.cons[c].df,
+    two-dimensional in n.cons[c].pnl.
+    For example:
+    * constraints for nominal capacity variables for generators are stored in
+      `n.cons.Generator.df.mu_upper`
+    * operational capacity limits for generators are stored in
+      `n.cons.Generator.pnl.mu_upper`
     """
     if not constraints.empty:
         pnl = constraints.ndim == 2
@@ -309,18 +317,6 @@ def get_con(n, c, attr, pop=False):
     return cons.pop(attr) if pop else cons[attr]
 
-def clear_references(n):
-    for c in n.iterate_components():
-        keys = list(c.pnl.keys())
-        for k in keys:
-            if (con_ref_suffix in k) or
-               (var_ref_suffix in k):
-                c.pnl.pop(k)
-    if 'variables' in n.__dir__():
-        del n.variables
-    if 'constraints' in n.__dir__():
-        del n.constraints
-
-
 # =============================================================================
 # solvers
 # =============================================================================

From 2e9eb107fe865beda137051b71dfac23070b4d2f Mon Sep 17 00:00:00 2001
From: Fabian
Date: Sun, 17 Nov 2019 16:39:20 +0100
Subject: [PATCH 088/111] linopf: better case differentiation for mapping duals
---
 pypsa/linopf.py | 37 +++++++++++++++++++++----------------
 1 file changed, 21 insertions(+), 16 deletions(-)

diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index 4541584a8..df5ff2cdb 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -640,24 +640,29 @@ def map_solution(c, attr):

         sp = sp[sp.isin(keep_shadowprices, level=0)]

-    def map_dual(c, attr, predefined=True):
-        constraints = get_con(n, c, attr, pop=pop)
-        predefined = True
-        if c not in n.all_components:
-            predefined = False
-        n.duals[c] = n.duals[c] if c in n.duals else Dict(df=pd.DataFrame(), pnl={})
+    def map_dual(c, attr):
+        # If c is a pypsa component name the dual is stored at n.pnl(c)
+        # or n.df(c). 
For the second case the index of the constraints have to + # be a subset of n.df(c).index otherwise the dual is stored at + # n.duals[c].df sign = -1 if 'lower' in attr else 1 - if isinstance(constraints, pd.DataFrame): - # case that variables are timedependent - pnl = n.pnl(c) if predefined else n.duals[c].pnl - set_from_frame(pnl, attr, constraints.stack().map(sign * - constraints_dual).unstack()) + constraints = get_con(n, c, attr, pop=pop) + is_pnl = isinstance(constraints, pd.DataFrame) + to_component = c in n.all_components + if is_pnl: + duals = constraints.stack().map(sign * constraints_dual).unstack() + if c not in n.duals and not to_component: + n.duals[c] = Dict(df=pd.DataFrame(), pnl={}) + pnl = n.pnl(c) if to_component else n.duals[c].pnl + set_from_frame(pnl, attr, duals) else: - # case that variables are static - if predefined: - n.df(c)[attr] = constraints.map(constraints_dual).fillna(n.df(c)[attr]) - else: - n.duals[c].df[attr] = constraints.map(constraints_dual) + duals = constraints.map(constraints_dual) + if to_component: + to_component = (duals.index.isin(n.df(c).index).all()) + if c not in n.duals and not to_component: + n.duals[c] = Dict(df=pd.DataFrame(), pnl={}) + df = n.df(c) if to_component else n.duals[c].df + df[attr] = duals n.duals = Dict() # extract shadow prices attached to components From 07f39903f23835b857c1e75eb8e14d3941e4fa03 Mon Sep 17 00:00:00 2001 From: Fabian Date: Sun, 17 Nov 2019 17:52:44 +0100 Subject: [PATCH 089/111] linopt: make as_pandas=True default in linexpr() --- pypsa/linopf.py | 51 ++++++++++++++++++++++++------------------------- pypsa/linopt.py | 28 +++++++++++++-------------- 2 files changed, 39 insertions(+), 40 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index df5ff2cdb..142a15fbf 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -157,13 +157,13 @@ def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): if attr + '_set' not in n.pnl(c): return fix = n.pnl(c)[attr + '_set'].unstack().dropna() if fix.empty: return - lhs = linexpr((1, get_var(n, c, attr).unstack()[fix.index])) + lhs = linexpr((1, get_var(n, c, attr).unstack()[fix.index]), as_pandas=False) constraints = write_constraint(n, lhs, '=', fix).unstack().T else: if attr + '_set' not in n.df(c): return fix = n.df(c)[attr + '_set'].dropna() if fix.empty: return - lhs = linexpr((1, get_var(n, c, attr)[fix.index])) + lhs = linexpr((1, get_var(n, c, attr)[fix.index]), as_pandas=False) constraints = write_constraint(n, lhs, '=', fix) set_conref(n, constraints, c, f'mu_{attr}_set') @@ -183,7 +183,7 @@ def define_ramp_limit_constraints(n, sns): #fix up gens_i = rup_i & get_non_extendable_i(n, c) - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True) + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') define_constraints(n, lhs, '<=', rhs, c, 'mu_ramp_limit_up', spec='nonext.') @@ -191,13 +191,12 @@ def define_ramp_limit_constraints(n, sns): gens_i = rup_i & get_extendable_i(n, c) limit_pu = n.df(c)['ramp_limit_up'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom), - as_pandas=True) + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom)) define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='ext.') #fix down gens_i = rdown_i & get_non_extendable_i(n, c) - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), as_pandas=True) + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = 
n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') define_constraints(n, lhs, '>=', rhs, c, 'mu_ramp_limit_down', spec='nonext.') @@ -205,8 +204,7 @@ def define_ramp_limit_constraints(n, sns): gens_i = rdown_i & get_extendable_i(n, c) limit_pu = n.df(c)['ramp_limit_down'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (limit_pu, p_nom), - as_pandas=True) + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (limit_pu, p_nom)) define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='ext.') @@ -220,8 +218,7 @@ def bus_injection(c, attr, groupcol='bus', sign=1): #additional sign only necessary for branches in reverse direction if 'sign' in n.df(c): sign = sign * n.df(c).sign - expr = linexpr((sign, get_var(n, c, attr)), as_pandas=True)\ - .rename(columns=n.df(c)[groupcol]) + expr = linexpr((sign, get_var(n, c, attr))).rename(columns=n.df(c)[groupcol]) # drop empty bus2, bus3 if multiline link if c == 'Link': expr.drop(columns='', errors='ignore', inplace=True) @@ -261,7 +258,7 @@ def define_kirchhoff_constraints(n, sns): def cycle_flow(ds): ds = ds[lambda ds: ds!=0.].dropna() - vals = linexpr((ds, branch_vars[ds.index])) + '\n' + vals = linexpr((ds, branch_vars[ds.index]), as_pandas=False) + '\n' return vals.sum(1) constraints = [] @@ -316,7 +313,7 @@ def define_storage_unit_constraints(n, sns): lhs, *axes = linexpr(*coeff_var, return_axes=True) def masked_term(coeff, var, cols): - return linexpr((coeff[cols], var[cols]), as_pandas=True)\ + return linexpr((coeff[cols], var[cols]))\ .reindex(index=axes[0], columns=axes[1], fill_value='').values if ('StorageUnit', 'spill') in n.variables.index: @@ -357,7 +354,7 @@ def define_store_constraints(n, sns): lhs, *axes = linexpr(*coeff_var, return_axes=True) def masked_term(coeff, var, cols): - return linexpr((coeff[cols], var[cols]), as_pandas=True)\ + return linexpr((coeff[cols], var[cols]))\ .reindex(index=axes[0], columns=axes[1], fill_value='').values lhs += masked_term(eff_stand, previous_e_cyclic, cyclic_i) @@ -398,7 +395,8 @@ def define_global_constraints(n, sns): if not gens.empty: em_pu = gens.carrier.map(emissions)/gens.efficiency em_pu = n.snapshot_weightings.to_frame() @ em_pu.to_frame('weightings').T - vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index])) + vals = linexpr((em_pu, get_var(n, 'Generator', 'p')[gens.index]), + as_pandas=False) lhs += join_exprs(vals) #storage units @@ -406,8 +404,9 @@ def define_global_constraints(n, sns): 'not cyclic_state_of_charge') sus_i = sus.index if not sus.empty: - vals = linexpr((-sus.carrier.map(emissions), - get_var(n, 'StorageUnit', 'state_of_charge').loc[sns[-1], sus_i])) + coeff_val = (-sus.carrier.map(emissions), get_var(n, 'StorageUnit', + 'state_of_charge').loc[sns[-1], sus_i]) + vals = linexpr(coeff_val, as_pandas=False) lhs = lhs + '\n' + join_exprs(vals) rhs -= sus.carrier.map(emissions) @ sus.state_of_charge_initial @@ -415,8 +414,9 @@ def define_global_constraints(n, sns): n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) stores = n.stores.query('carrier in @emissions.index and not e_cyclic') if not stores.empty: - vals = linexpr((-stores.carrier.map(emissions), - get_var(n, 'Store', 'e').loc[sns[-1], stores.index])) + coeff_val = (-stores.carrier.map(emissions), get_var(n, 'Store', 'e') + .loc[sns[-1], stores.index]) + vals = linexpr(coeff_val, as_pandas=False) lhs = lhs + '\n' + join_exprs(vals) rhs -= stores.carrier.map(emissions) @ stores.e_initial @@ -436,7 +436,8 @@ def 
define_global_constraints(n, sns): for c, attr in (('Line', 's_nom'), ('Link', 'p_nom')): ext_i = n.df(c).query(f'carrier in @car and {attr}_extendable').index if ext_i.empty: continue - v = linexpr((n.df(c).length[ext_i], get_var(n, c, attr)[ext_i])) + v = linexpr((n.df(c).length[ext_i], get_var(n, c, attr)[ext_i]), + as_pandas=False) lhs += join_exprs(v) + '\n' if lhs == '': continue sense = glc.sense @@ -453,7 +454,8 @@ def define_global_constraints(n, sns): for c, attr in (('Line', 's_nom'), ('Link', 'p_nom')): ext_i = n.df(c).query(f'carrier in @car and {attr}_extendable').index if ext_i.empty: continue - v = linexpr((n.df(c).capital_cost[ext_i], get_var(n, c, attr)[ext_i])) + v = linexpr((n.df(c).capital_cost[ext_i], get_var(n, c, attr)[ext_i]), + as_pandas=False) lhs += join_exprs(v) + '\n' if lhs == '': continue sense = glc.sense @@ -471,7 +473,7 @@ def define_objective(n, sns): nom_attr = nominal_attrs.items() constant = sum(n.df(c)[attr] @ n.df(c).capital_cost for c, attr in nom_attr) object_const = write_bound(n, constant, constant) - n.objective_f.write(linexpr((1, object_const))[0]) + n.objective_f.write(linexpr((1, object_const), as_pandas=False)[0]) for c, attr in lookup.query('marginal_cost').index: cost = (get_as_dense(n, c, 'marginal_cost', sns) @@ -479,16 +481,13 @@ def define_objective(n, sns): .mul(n.snapshot_weightings[sns], axis=0)) if cost.empty: continue terms = linexpr((cost, get_var(n, c, attr).loc[sns, cost.columns])) - for t in terms.flatten(): - n.objective_f.write(t) + n.objective_f.write(join_exprs(terms)) #investment for c, attr in nominal_attrs.items(): cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] if cost.empty: continue terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n' - for t in terms.flatten(): - n.objective_f.write(t) - + n.objective_f.write(join_exprs(terms)) def prepare_lopf(n, snapshots=None, keep_files=False, diff --git a/pypsa/linopt.py b/pypsa/linopt.py index ea1cf7514..e6cb1622c 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -127,13 +127,13 @@ def align_with_static_component(n, c, attr): n.vars[c].pnl[attr] = n.vars[c].pnl[attr].reindex(columns=n.df(c).index) -def linexpr(*tuples, as_pandas=False, return_axes=False): +def linexpr(*tuples, as_pandas=True, return_axes=False): """ Elementwise concatenation of tuples in the form (coefficient, variables). - Coefficient and variables can be arrays, series or frames. Returns - a np.ndarray of strings. If return_axes is set to True and a pd.Series or - pd.DataFrame was past, the corresponding index (and column if existent) is - returned additionaly. + Coefficient and variables can be arrays, series or frames. Per default + returns a pandas.Series or pandas.DataFrame of strings. If return_axes + is set to True the return value is split into values and axes, where values + are the numpy.array and axes a tuple containing index and column if present. Parameters ---------- @@ -141,7 +141,7 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): Each tuple must of the form (coeff, var), where * coeff is a numerical value, or a numerical array, series, frame * var is a str or a array, series, frame of variable strings - as_pandas : bool, default False + as_pandas : bool, default True Whether to return to resulting array as a series, if 1-dimensional, or a frame, if 2-dimensional. Supersedes return_axes argument. 
return_axes: Boolean, default False @@ -158,11 +158,6 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): Create the linear expression strings - >>> linexpr((coeff1, var1), (coeff2, var2)) - array(['+1.0 a1 -0.5 b1', '+1.0 a2 -0.3 b2', '+1.0 a3 -1.0 b3'], dtype=object) - - For turning the result into a series or frame again: - >>> linexpr((coeff1, var1), (coeff2, var2), as_pandas=True) 0 +1.0 a1 -0.5 b1 1 +1.0 a2 -0.3 b2 @@ -170,7 +165,12 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): dtype: object For a further step the resulting frame can be used as the lhs of - :func:`pypsa.linopt.write_contraint` + :func:`pypsa.linopt.define_constraints` + + For retrieving only the values: + + >>> linexpr((coeff1, var1), (coeff2, var2), as_pandas=False) + array(['+1.0 a1 -0.5 b1', '+1.0 a2 -0.3 b2', '+1.0 a3 -1.0 b3'], dtype=object) """ axes, shape = broadcasted_axes(*tuples) @@ -178,10 +178,10 @@ def linexpr(*tuples, as_pandas=False, return_axes=False): if np.prod(shape): for coeff, var in tuples: expr = expr + _str_array(coeff) + _str_array(var) + '\n' - if as_pandas: - return to_pandas(expr, *axes) if return_axes: return (expr, *axes) + if as_pandas: + return to_pandas(expr, *axes) return expr From 2e3201d99083bccc64d33ed199b5ed54107a7d54 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 19 Nov 2019 17:04:58 +0100 Subject: [PATCH 090/111] update doc --- doc/optimal_power_flow.rst | 20 +++---- pypsa/linopt.py | 120 ++++++++++++++++++++++++++++++++++--- 2 files changed, 122 insertions(+), 18 deletions(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 398ca19aa..5a412c640 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -597,28 +597,26 @@ An additional constraint can easily be implemented by using the funtions * ``pypsa.linopt.get_var`` for getting the variables which should be included in the constraint * ``pypsa.linopt.linexpr`` for creating linear expressions for the left hand side (lhs) of the constraint. Note that only the lhs includes all terms with variables, the rhs is a constant. -* ``pypsa.linopt.write_constraint`` for writing out the constraint to the ``.lp`` file -* ``pypsa.linopt.set_conref`` for attaching the constraint to the network itself, this only necessary if a shadow price should be extracted after solving +* ``pypsa.linopt.define_constraints`` for defining a network constraint. The are functions defined as such: .. automethod:: pypsa.linopt.get_var .. automethod:: pypsa.linopt.linexpr -.. automethod:: pypsa.linopt.write_constraint -.. automethod:: pypsa.linopt.set_conref +.. automethod:: pypsa.linopt.define_constraints -The function ``extra_postprocessing`` is not necessary when pyomo is deactivated. For retrieving additional shadow prices, just pass the component name, to which the constraint is attached, to the ``keep_shadowprices`` parameter of the ``lopf`` function. +The function ``extra_postprocessing`` is not necessary when pyomo is deactivated. For retrieving additional shadow prices, just pass the name of the constraint, to which the constraint is attached, to the ``keep_shadowprices`` parameter of the ``lopf`` function. -Fixing variables ----------------- +.. Fixing variables +.. ---------------- -This feature is only valid if pyomo is disabled in the lopf function (i.e. ``pyomo=False``). It is possible to fix all variables to specific values. Create a pandas DataFrame or a column with the same name as the variable but with suffix '_set'. 
For all non-``NaN`` values additional constraints will be built to fix the variables.

.. For example, let's say we want to fix the output of a single generator 'gas1' to 200 MW for all snapshots. Then we can add a dataframe ``p_set`` to network.generators_t with the corresponding value and index.

..     >>> network.generators_t['p_set'] = pd.DataFrame(200, index=network.snapshots, columns=['gas1'])

.. The lopf will now build extra constraints to fix the ``p`` variables of generator 'gas1' to 200. In the same manner, we can fix the variables only for some specific snapshots. This is applicable to all variables, also ``state_of_charge`` for storage units or ``p`` for links. Static investment variables can be fixed by adding additional columns, e.g. an ``s_nom_set`` column to ``network.lines``.

diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index e6cb1622c..bb40e14fa 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -22,15 +22,122 @@ logger = logging.getLogger(__name__)
 
 # =============================================================================
-# Front end function
+# Front end functions
 # =============================================================================
 
 def define_variables(n, lower, upper, name, attr='', axes=None, spec=''):
+    """
+    Defines variable(s) for a pypsa network with given lower bound(s) and upper
+    bound(s). The variables are stored in the network object under n.vars with
+    key of the variable name. If multiple variables are defined at once, at
+    least one of lower and upper has to be an array (including pandas) of
+    shape > (1,) or axes have to define the dimensions of the variables.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    lower : pd.Series/pd.DataFrame/np.array/str/float
+        lower bound(s) for the variable(s)
+    upper : pd.Series/pd.DataFrame/np.array/str/float
+        upper bound(s) for the variable(s)
+    name : str
+        general name of the variable (or component which the variable is
+        referring to). The variable will then be stored under:
+            * n.vars[name].pnl if the variable is two-dimensional
+            * n.vars[name].df if the variable is one-dimensional
+    attr : str, default ''
+        Specific name of the variable, defines under which name the variable(s)
+        are stored in n.vars[name].pnl if two-dimensional or in n.vars[name].df
+        if one-dimensional
+    axes : pd.Index or tuple of pd.Index objects, default None
+        Specifies the axes and therefore the shape of the variables if bounds
+        are single strings or floats. 
This is helpful when multiple variables
+        have the same upper and lower bound.
+
+
+    Example
+    -------
+
+    Let's say we want to define a demand-side-managed load at each bus of
+    network n, which has a minimum of 0 and a maximum of 10. We then define
+    lower bound (lb) and upper bound (ub) and pass them to define_variables
+
+    >>> from pypsa.linopt import define_variables, get_var
+    >>> lb = pd.DataFrame(0, index=n.snapshots, columns=n.buses.index)
+    >>> ub = pd.DataFrame(10, index=n.snapshots, columns=n.buses.index)
+    >>> define_variables(n, lb, ub, 'DSM', 'variableload')
+
+    Now the variables can be accessed by :func:`pypsa.linopt.get_var` using
+
+    >>> variables = get_var(n, 'DSM', 'variableload')
+
+    Note that this is useful for the `extra_functionality` argument.
+    """
     var = write_bound(n, lower, upper, axes)
     set_varref(n, var, name, attr, spec=spec)
 
 
 def define_constraints(n, lhs, sense, rhs, name, attr='', axes=None, spec=''):
+    """
+    Defines constraint(s) for a pypsa network with given left hand side (lhs),
+    sense and right hand side (rhs). The constraints are stored in the network
+    object under n.cons with key of the constraint name. If multiple constraints
+    are defined at once using only np.arrays, the axes argument can be used
+    for defining the axes of the constraints (this is especially recommended
+    for time-dependent constraints). If one of lhs, sense and rhs is a
+    pd.Series/pd.DataFrame the axes argument is not necessary.
+
+    Parameters
+    ----------
+    n: pypsa.Network
+    lhs: pd.Series/pd.DataFrame/np.array/str/float
+        left hand side of the constraint(s), created with
+        :func:`pypsa.linopt.linexpr`.
+    sense: pd.Series/pd.DataFrame/np.array/str/float
+        sense(s) of the constraint(s)
+    rhs: pd.Series/pd.DataFrame/np.array/str/float
+        right hand side of the constraint(s), must only contain pure constants,
+        no variables
+    name: str
+        general name of the constraint (or component which the constraint is
+        referring to). The constraint will then be stored under:
+
+            * n.cons[name].pnl if the constraint is two-dimensional
+            * n.cons[name].df if the constraint is one-dimensional
+    attr: str, default ''
+        Specific name of the constraint, defines under which name the
+        constraint(s) are stored in n.cons[name].pnl if two-dimensional or in
+        n.cons[name].df if one-dimensional
+    axes: pd.Index or tuple of pd.Index objects, default None
+        Specifies the axes if all of lhs, sense and rhs are np.arrays or single
+        strings or floats.
+
+
+    Example
+    -------
+
+    Let's say we want to constrain all gas generators to a maximum of 100 MWh
+    during the first 10 snapshots. We first get all operational variables
+    for this subset and constrain their sum to be less than or equal to 100.
+
+    >>> from pypsa.linopt import get_var, linexpr, define_constraints
+    >>> gas_i = n.generators.query('carrier == "Natural Gas"').index
+    >>> gas_vars = get_var(n, 'Generator', 'p').loc[n.snapshots[:10], gas_i]
+    >>> lhs = linexpr((1, gas_vars)).sum().sum()
+    >>> define_constraints(n, lhs, '<=', 100, 'Generator', 'gas_power_limit')
+
+    Now the constraint references can be accessed by
+    :func:`pypsa.linopt.get_con` using
+
+    >>> cons = get_con(n, 'Generator', 'gas_power_limit')
+
+    Under the hood they are stored in n.cons.Generator.pnl.gas_power_limit.
+    For retrieving their shadow prices, add the general name of the constraint
+    to the keep_shadowprices argument.
+
+    Note that this is useful for the `extra_functionality` argument. 
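+
+    A time-dependent variant is a straightforward extension (a sketch,
+    reusing the hypothetical `gas_i` index from above). Keeping the left
+    hand side as a DataFrame writes one constraint per snapshot and
+    generator:
+
+    >>> lhs = linexpr((1, get_var(n, 'Generator', 'p')[gas_i]))
+    >>> define_constraints(n, lhs, '<=', 100, 'Generator', 'gas_dispatch_limit')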
+
+    """
     con = write_constraint(n, lhs, sense, rhs, axes)
     set_conref(n, con, name, attr, spec=spec)
 
@@ -54,7 +161,6 @@ def write_bound(n, lower, upper, axes=None):
     upper are floats, it is necessary to pass axes, a tuple of (index, columns)
     or (index), for creating variables with the same upper and lower bounds.
     Return a series or frame with variable references.
-
     """
     axes, shape, length = _get_handlers(axes, lower, upper)
     n._xCounter += length
@@ -70,7 +176,6 @@ def write_constraint(n, lhs, sense, rhs, axes=None):
     constraints file. If lhs, sense and rhs are numpy.ndarrays, axes must not
     be None but a tuple of (index, columns) or (index).
     Return a series or frame with constraint references.
-
     """
     axes, shape, length = _get_handlers(axes, lhs, sense, rhs)
     n._cCounter += length
@@ -137,10 +242,11 @@ def linexpr(*tuples, as_pandas=True, return_axes=False):
 
     Parameters
     ----------
-    tulples: tuple of tuples
+    tuples: tuple of tuples
         Each tuple must be of the form (coeff, var), where
-        * coeff is a numerical value, or a numerical array, series, frame
-        * var is a str or a array, series, frame of variable strings
+
+        * coeff is a numerical value, or a numerical array, series, frame
+        * var is a str or an array, series, frame of variable strings
     as_pandas : bool, default True
         Whether to return the resulting array as a series, if 1-dimensional, or
         a frame, if 2-dimensional. Supersedes return_axes argument.
@@ -158,7 +264,7 @@
 
     Create the linear expression strings
 
-    >>> linexpr((coeff1, var1), (coeff2, var2), as_pandas=True)
+    >>> linexpr((coeff1, var1), (coeff2, var2))
     0    +1.0 a1 -0.5 b1
     1    +1.0 a2 -0.3 b2
     2    +1.0 a3 -1.0 b3

From 90fdeb7f927a7227e05bc881eaf6f7d6f5944804 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Wed, 20 Nov 2019 12:36:50 +0100
Subject: [PATCH 091/111] * introduce get_dual and get_sol functions for easy
 handling of duals post solving * update doc string
---
 pypsa/components.py | 34 +++++++++++-----------
 pypsa/linopf.py     | 71 +++++++++++++++++++++++--------------------
 pypsa/linopt.py     | 52 +++++++++++++++++++++++++++++++++
 3 files changed, 109 insertions(+), 48 deletions(-)

diff --git a/pypsa/components.py b/pypsa/components.py
index 22ae34205..4b7ec8f86 100644
--- a/pypsa/components.py
+++ b/pypsa/components.py
@@ -441,50 +441,50 @@ def lopf(self, snapshots=None, pyomo=True, solver_name="glpk",
         ----------------
 
         ptdf_tolerance : float
-            Only when pyomo is True.
+            Only taking effect when pyomo is True.
             Value below which PTDF entries are ignored
         free_memory : set, default {'pyomo'}
-            Only when pyomo is True.
+            Only taking effect when pyomo is True.
             Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series
             data away while the solver runs (as a pickle to disk) and/or free
             `pyomo` data after the solution has been extracted.
         solver_io : string, default None
-            Only when pyomo is True.
+            Only taking effect when pyomo is True.
            Solver Input-Output option, e.g. "python" to use "gurobipy" for
             solver_name="gurobi"
         skip_pre : bool, default False
-            Only when pyomo is True.
+            Only taking effect when pyomo is True.
             Skip the preliminary steps of computing topology, calculating
             dependent values and finding bus controls.
         extra_postprocessing : callable function
+            Only taking effect when pyomo is True.
             This function must take three arguments
             `extra_postprocessing(network,snapshots,duals)` and is called after
            the model has solved and the results are extracted. 
It allows the user to extract further information about the solution, such as additional shadow prices. warmstart : bool or string, default False - Only when pyomo is False. + Only taking effect when pyomo is False. Use this to warmstart the optimization. Pass a string which gives the path to the basis file. If set to True, a path to a basis file must be given in network.basis_fn. store_basis : bool, default True - Only when pyomo is False. + Only taking effect when pyomo is False. Whether to store the basis of the optimization results. If True, the path to the basis file is saved in network.basis_fn. Note that a basis can only be stored if simplex, dual-simplex, or barrier *with* crossover is used for solving. keep_references : bool, default False - Only when pyomo is False. + Only taking effect when pyomo is False. Keep the references of variable and constraint names withing the - network, e.g. n.generators_t.p_varref - useful for constructing - extra_functionality or debugging - keep_shadowprices : bool or list of component names, default None - Only when pyomo is False. - Keep shadow prices for all constraints, if set to True. - These are stored at e.g. n.generators_t.mu_upper for upper limit - of p_nom. If a list of component names is passed, shadow - prices of variables attached to those are extracted. If set to None, - components default to ['Bus', 'Line', 'GlobalConstraint'] + network. These can be looked up in `n.vars` and `n.cons` after solving. + keep_shadowprices : bool or list of component names + Only taking effect when pyomo is False. + Keep shadow prices for all constraints, if set to True. If a list + is passed the shadow prices will only be parsed for those constraint + names. Defaults to ['Bus', 'Line', 'GlobalConstraint']. + After solving, the shadow prices can be retrieved using + :func:`pypsa.linopt.get_dual` with corresponding name """ args = {'snapshots': snapshots, 'keep_files': keep_files, @@ -985,7 +985,7 @@ def bad_by_type(branch, attr): c.list_name, attr, bad) bad = c.df.index[(c.df["x"] == 0.) & (c.df["r"] == 0.) 
& - c.df.apply(bad_by_type, args=('x',), axis=1) & + c.df.apply(bad_by_type, args=('x',), axis=1) & c.df.apply(bad_by_type, args=('r',), axis=1)] if len(bad) > 0: logger.warning("The following %s have zero series impedance, which will break the load flow:\n%s", diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 142a15fbf..776b0d197 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -41,6 +41,7 @@ lookup = pd.read_csv(os.path.join(os.path.dirname(__file__), 'variables.csv'), index_col=['component', 'variable']) + def define_nominal_for_extendable_variables(n, c, attr): """ Initializes variables for nominal capacities for a given component and a @@ -181,26 +182,26 @@ def define_ramp_limit_constraints(n, sns): p = get_var(n, c, 'p').loc[sns[1:]] p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]] - #fix up + # fix up gens_i = rup_i & get_non_extendable_i(n, c) lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') define_constraints(n, lhs, '<=', rhs, c, 'mu_ramp_limit_up', spec='nonext.') - #ext up + # ext up gens_i = rup_i & get_extendable_i(n, c) limit_pu = n.df(c)['ramp_limit_up'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom)) define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='ext.') - #fix down + # fix down gens_i = rdown_i & get_non_extendable_i(n, c) lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') define_constraints(n, lhs, '>=', rhs, c, 'mu_ramp_limit_down', spec='nonext.') - #ext down + # ext down gens_i = rdown_i & get_extendable_i(n, c) limit_pu = n.df(c)['ramp_limit_down'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] @@ -215,7 +216,7 @@ def define_nodal_balance_constraints(n, sns): """ def bus_injection(c, attr, groupcol='bus', sign=1): - #additional sign only necessary for branches in reverse direction + # additional sign only necessary for branches in reverse direction if 'sign' in n.df(c): sign = sign * n.df(c).sign expr = linexpr((sign, get_var(n, c, attr))).rename(columns=n.df(c)[groupcol]) @@ -289,7 +290,7 @@ def define_storage_unit_constraints(n, sns): sus_i = n.storage_units.index if sus_i.empty: return c = 'StorageUnit' - #spillage + # spillage upper = get_as_dense(n, c, 'inflow', sns).loc[:, lambda df: df.max() > 0] spill = write_bound(n, 0, upper) set_varref(n, spill, 'StorageUnit', 'spill') @@ -390,7 +391,7 @@ def define_global_constraints(n, sns): if emissions.empty: continue - #generators + # generators gens = n.generators.query('carrier in @emissions.index') if not gens.empty: em_pu = gens.carrier.map(emissions)/gens.efficiency @@ -399,7 +400,7 @@ def define_global_constraints(n, sns): as_pandas=False) lhs += join_exprs(vals) - #storage units + # storage units sus = n.storage_units.query('carrier in @emissions.index and ' 'not cyclic_state_of_charge') sus_i = sus.index @@ -410,7 +411,7 @@ def define_global_constraints(n, sns): lhs = lhs + '\n' + join_exprs(vals) rhs -= sus.carrier.map(emissions) @ sus.state_of_charge_initial - #stores + # stores n.stores['carrier'] = n.stores.bus.map(n.buses.carrier) stores = n.stores.query('carrier in @emissions.index and not e_cyclic') if not stores.empty: @@ -426,7 +427,7 @@ def define_global_constraints(n, sns): # for the next two to we need a line carrier if len(n.global_constraints) > len(glcs): n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) - #expansion limits + # expansion limits glcs = 
n.global_constraints.query('type == ' '"transmission_volume_expansion_limit"') substr = lambda s: re.sub('[\[\]\(\)]', '', s) @@ -445,7 +446,7 @@ def define_global_constraints(n, sns): con = write_constraint(n, lhs, sense, rhs, axes=pd.Index([name])) set_conref(n, con, 'GlobalConstraint', 'mu', name) - #expansion cost limits + # expansion cost limits glcs = n.global_constraints.query('type == ' '"transmission_expansion_cost_limit"') for name, glc in glcs.iterrows(): @@ -482,7 +483,7 @@ def define_objective(n, sns): if cost.empty: continue terms = linexpr((cost, get_var(n, c, attr).loc[sns, cost.columns])) n.objective_f.write(join_exprs(terms)) - #investment + # investment for c, attr in nominal_attrs.items(): cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] if cost.empty: continue @@ -556,7 +557,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, for f, fd in (('bounds_f', fdb), ('constraints_f', fdc), ('objective_f', fdo)): getattr(n, f).close(); delattr(n, f); os.close(fd) - #concate files + # concate files with open(problem_fn, 'wb') as wfd: for f in [objective_fn, constraints_fn, bounds_fn]: with open(f,'rb') as fd: @@ -591,9 +592,10 @@ def map_solution(c, attr): if (c, attr) not in lookup.index: predefined = False n.sols[c] = n.sols[c] if c in n.sols else Dict(df=pd.DataFrame(), pnl={}) - + n.solutions.at[(c, attr), 'in_comp'] = predefined if isinstance(variables, pd.DataFrame): # case that variables are timedependent + n.solutions.at[(c, attr), 'pnl'] = True pnl = n.pnl(c) if predefined else n.sols[c].pnl values = variables.stack().map(variables_sol).unstack() if c in n.passive_branch_components: @@ -609,13 +611,16 @@ def map_solution(c, attr): set_from_frame(pnl, attr, values) else: # case that variables are static + n.solutions.at[(c, attr), 'pnl'] = False + sol = variables.map(variables_sol) if predefined: - n.df(c)[attr + 'opt'] = variables.map(variables_sol)\ - .fillna(n.df(c)[attr]) + non_ext = n.df(c)[attr] + n.df(c)[attr + '_opt'] = sol.reindex(non_ext.index).fillna(non_ext) else: - n.sols[c].df[attr] = variables.map(variables_sol) + n.sols[c].df[attr] = sol n.sols = Dict() + n.solutions = pd.DataFrame(index=n.variables.index, columns=['in_comp', 'pnl']) for c, attr in n.variables.index.intersection(lookup.index): map_solution(c, attr) @@ -628,17 +633,17 @@ def map_solution(c, attr): c = 'StorageUnit' n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store'] - #duals + # duals if keep_shadowprices == False: keep_shadowprices = [] - elif keep_shadowprices is None: - keep_shadowprices = ['Bus', 'Line', 'GlobalConstraint'] +# # TODO move to argdefault +# elif keep_shadowprices is None: +# keep_shadowprices = ['Bus', 'Line', 'GlobalConstraint'] sp = n.constraints.index if isinstance(keep_shadowprices, list): sp = sp[sp.isin(keep_shadowprices, level=0)] - def map_dual(c, attr): # If c is a pypsa component name the dual is store at n.pnl(c) # or n.df(c). 
For the second case the index of the constraints have to @@ -647,23 +652,28 @@ def map_dual(c, attr): sign = -1 if 'lower' in attr else 1 constraints = get_con(n, c, attr, pop=pop) is_pnl = isinstance(constraints, pd.DataFrame) + n.dualities.at[(c, attr), 'pnl'] = is_pnl to_component = c in n.all_components if is_pnl: + n.dualities.at[(c, attr), 'in_comp'] = to_component duals = constraints.stack().map(sign * constraints_dual).unstack() if c not in n.duals and not to_component: n.duals[c] = Dict(df=pd.DataFrame(), pnl={}) pnl = n.pnl(c) if to_component else n.duals[c].pnl set_from_frame(pnl, attr, duals) else: + # here to_component can change duals = constraints.map(constraints_dual) if to_component: to_component = (duals.index.isin(n.df(c).index).all()) + n.dualities.at[(c, attr), 'in_comp'] = to_component if c not in n.duals and not to_component: n.duals[c] = Dict(df=pd.DataFrame(), pnl={}) df = n.df(c) if to_component else n.duals[c].df df[attr] = duals n.duals = Dict() + n.dualities = pd.DataFrame(index=sp, columns=['in_comp', 'pnl']) # extract shadow prices attached to components for c, attr in sp: map_dual(c, attr) @@ -713,8 +723,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", solver_logfile=None, extra_functionality=None, extra_postprocessing=None, formulation="kirchhoff", keep_references=False, keep_files=False, - keep_shadowprices=None, solver_options=None, - warmstart=False, store_basis=True): + keep_shadowprices=['Bus', 'Line', 'GlobalConstraint'], + solver_options=None, warmstart=False, store_basis=True): """ Linear optimal power flow for a group of snapshots. @@ -763,14 +773,13 @@ def network_lopf(n, snapshots=None, solver_name="cbc", *with* crossover is used for solving. keep_references : bool, default False Keep the references of variable and constraint names withing the - network, e.g. n.generators_t.p_varref - useful for constructing - extra_functionality or debugging - keep_shadowprices : bool or list of component names, default None - Keep shadow prices for all constraints, if set to True. - These are stored at e.g. n.generators_t.mu_upper for upper limit - of p_nom. If a list of component names is passed, shadow - prices of variables attached to those are extracted. If set to None, - components default to ['Bus', 'Line', 'GlobalConstraint'] + network. These can be looked up in `n.vars` and `n.cons` after solving. + keep_shadowprices : bool or list of component names + Keep shadow prices for all constraints, if set to True. If a list + is passed the shadow prices will only be parsed for those constraint + names. Defaults to ['Bus', 'Line', 'GlobalConstraint']. + After solving, the shadow prices can be retrieved using + :func:`pypsa.linopt.get_dual` with corresponding name """ supported_solvers = ["cbc", "gurobi", 'glpk', 'scs'] diff --git a/pypsa/linopt.py b/pypsa/linopt.py index bb40e14fa..3ffaef9e4 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -423,6 +423,58 @@ def get_con(n, c, attr, pop=False): return cons.pop(attr) if pop else cons[attr] +def get_dual(n, name, attr=''): + """ + Retrieves shadow price for a given constraint. Note that for retrieving + shadow prices of a custom constraint, its name has to be passed to + `keep_references` in the lopf, or `keep_references` has to be set to True. + Note that a lookup of all stored shadow prices is given in n.dualities. 
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    name : str
+        constraint name to which the constraint belongs
+    attr: str
+        attribute name of the constraint
+
+    Example
+    -------
+    get_dual(n, 'Generator', 'mu_upper')
+    """
+    pnl = n.dualities.at[(name, attr), 'pnl']
+    if n.dualities.at[(name, attr), 'in_comp']:
+        return n.pnl(name)[attr] if pnl else n.df(name)[attr + '_opt']
+    else:
+        return n.duals[name].pnl[attr] if pnl else n.duals[name].df[attr]
+
+
+def get_sol(n, name, attr=''):
+    """
+    Retrieves solution for a given variable. Note that a lookup of all stored
+    solutions is given in n.solutions.
+
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    name : str
+        general variable name (or component name if variable is attached to a
+        component)
+    attr: str
+        attribute name of the variable
+
+    Example
+    -------
+    get_sol(n, 'Generator', 'p')
+    """
+    pnl = n.solutions.at[(name, attr), 'pnl']
+    if n.solutions.at[(name, attr), 'in_comp']:
+        return n.pnl(name)[attr] if pnl else n.df(name)[attr + '_opt']
+    else:
+        return n.sols[name].pnl[attr] if pnl else n.sols[name].df[attr]
+
+
 # =============================================================================
 # solvers
 # =============================================================================

From a57039df2c0116f043d9ff3389f95c887324088a Mon Sep 17 00:00:00 2001
From: Fabian
Date: Wed, 20 Nov 2019 15:35:41 +0100
Subject: [PATCH 092/111] add example, fix static 'opt' value
---
 examples/lopf_with_pyomo_False.ipynb | 392 +++++++++++++++++++++++++++
 pypsa/linopf.py                      |  21 +-
 pypsa/linopt.py                      |  42 +--
 3 files changed, 423 insertions(+), 32 deletions(-)
 create mode 100644 examples/lopf_with_pyomo_False.ipynb

diff --git a/examples/lopf_with_pyomo_False.ipynb b/examples/lopf_with_pyomo_False.ipynb
new file mode 100644
index 000000000..ab4214a59
--- /dev/null
+++ b/examples/lopf_with_pyomo_False.ipynb
@@ -0,0 +1,392 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pypsa\n",
    "import pandas as pd\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "path = os.path.join(pypsa.__path__[0], '..', 'examples', 'ac-dc-meshed', 'ac-dc-data')\n",
    "n = pypsa.Network(path)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Modify the network a bit"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Set gas generators to non-extendable"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.generators.loc[n.generators.carrier == 'gas', 'p_nom_extendable'] = False"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Add ramp limit"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.generators.loc[n.generators.carrier == 'gas', 'ramp_limit_down'] = 0.2\n",
    "n.generators.loc[n.generators.carrier == 'gas', 'ramp_limit_up'] = 0.2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Add additional storage units (cyclic and non-cyclic) and fix one state_of_charge"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.add('StorageUnit', 'su', bus='Manchester', marginal_cost=10, inflow=50,\n",
    "      p_nom_extendable=True, capital_cost=10, p_nom=2000,\n",
    "      efficiency_dispatch=0.5,\n",
    "      
cyclic_state_of_charge=True, state_of_charge_initial=1000)\n",
    "\n",
    "n.add('StorageUnit', 'su2', bus='Manchester', marginal_cost=10,\n",
    "      p_nom_extendable=True, capital_cost=50, p_nom=2000,\n",
    "      efficiency_dispatch=0.5, carrier='gas',\n",
    "      cyclic_state_of_charge=False, state_of_charge_initial=1000)\n",
    "\n",
    "n.storage_units_t.state_of_charge_set.loc[n.snapshots[7], 'su'] = 100"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Add additional store"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.add('Bus', 'storebus', carrier='hydro', x=-5, y=55)\n",
    "n.madd('Link', ['battery_power', 'battery_discharge'], '',\n",
    "       bus0=['Manchester', 'storebus'], bus1=['storebus', 'Manchester'],\n",
    "       p_nom=100, efficiency=.9, p_nom_extendable=True, p_nom_max=1000)\n",
    "n.madd('Store', ['store'], bus='storebus', e_nom=2000, e_nom_extendable=True,\n",
    "       marginal_cost=10, capital_cost=10, e_nom_max=5000, e_initial=100,\n",
    "       e_cyclic=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Extra functionalities:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pypsa.linopt import get_var, linexpr, join_exprs, define_constraints "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "One of the most important functions is linexpr, which takes one or more tuples of coefficient and variable pairs which should go into the left hand side (lhs) of the constraint. "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 1. Add minimum for state_of_charge"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def minimal_state_of_charge(n, snapshots):\n",
    "    vars_soc = get_var(n, 'StorageUnit', 'state_of_charge')\n",
    "    lhs = linexpr((1, vars_soc))\n",
    "    define_constraints(n, lhs, '>', 50, 'StorageUnit', 'soc_lower_bound')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 2. Fix the ratio between ingoing and outgoing capacity of the Store"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def fix_link_cap_ratio(n, snapshots):\n",
    "    vars_link = get_var(n, 'Link', 'p_nom')\n",
    "    eff = n.links.at['battery_power', 'efficiency']\n",
    "    lhs = linexpr((1, vars_link['battery_power']), (-eff, vars_link['battery_discharge']))\n",
    "    define_constraints(n, lhs, '=', 0, 'battery_discharge', attr='fixratio')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 3. Every bus must in total produce at least 20% of the total demand\n",
    "\n",
    "This requires the function `pypsa.linopt.join_exprs`, which sums up arrays of linear expressions."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def fix_bus_production(n, snapshots):\n",
    "    total_demand = n.loads_t.p_set.sum().sum() \n",
    "    prod_per_bus = linexpr((1, get_var(n, 'Generator', 'p'))).groupby(n.generators.bus, axis=1).apply(join_exprs)\n",
    "    define_constraints(n, prod_per_bus, '>=', total_demand/5, 'Bus', 'production_share')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Combine them ..."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def extra_functionalities(n, snapshots):\n",
    "    minimal_state_of_charge(n, snapshots)\n",
    "    fix_link_cap_ratio(n, snapshots)\n",
    "    fix_bus_production(n, snapshots)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### ...and run the lopf with `pyomo=False`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.lopf(pyomo=False, extra_functionality=extra_functionalities, \n",
    "       keep_shadowprices=['Bus', 'battery_discharge', 'StorageUnit'])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `keep_shadowprices` argument in the lopf now decides which shadow prices (SP) should be retrieved. It can either be set to `True`, in which case all SP are kept, or to a list of constraint names. This is why the `name` argument in `define_constraints` is necessary, in our case 'battery_discharge', 'StorageUnit' and 'Bus'. "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Analysing the constraints"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Let's see if the system took over our own constraints. We look at `n.constraints`, which summarises the constraints going into the linear problem."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.constraints"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The last three entries show our constraints. As 'soc_lower_bound' is time-dependent, the `pnl` value is set to `True`. \n",
    "\n",
    "Let's check whether our two custom constraints are fulfilled:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.links.loc[['battery_power', 'battery_discharge'], ['p_nom_opt']]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.storage_units_t.state_of_charge"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.generators_t.p.groupby(n.generators.bus, axis=1).sum().sum()/n.loads_t.p.sum().sum()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Looks good! Now let's see which dual values were parsed. For that, we have a look into `n.dualvalues`. \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n.dualvalues"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Again we see that the last two entries reflect our constraints (the values in the columns play only a minor role). Let's have a look at what the values are:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pypsa.linopt import get_dual"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "get_dual(n, 'StorageUnit', 'soc_lower_bound')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "get_dual(n, 'battery_discharge', 'fixratio')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "get_dual(n, 'Bus', 'production_share')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Side note\n",
    "Some of the predefined constraints are stored in the components themselves, like `n.lines_t.mu_upper` or `n.buses_t.marginal_price`; this is the case if there are designated columns for those. All other duals are stored under the hood in `n.duals`"
   ]
  }
 ],
 "metadata": {
  "@webio": {
   "lastCommId": null,
   "lastKernelId": null
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index 776b0d197..106c1c9fb 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -128,8 +128,7 @@ def define_dispatch_for_extendable_constraints(n, sns, c, attr):
     lhs, *axes = linexpr((max_pu, nominal_v), (-1, operational_ext_v),
                          return_axes=True)
-    constraints = write_constraint(n, lhs, '>=', rhs, axes)
-    set_conref(n, constraints, c, 'mu_upper', spec=attr)
+    define_constraints(n, lhs, '>=', rhs, c, 'mu_upper', axes=axes, spec=attr)
 
     lhs, *axes = linexpr((min_pu, nominal_v), (-1, operational_ext_v),
                          return_axes=True)
@@ -636,9 +635,6 @@ def map_solution(c, attr):
     # duals
     if keep_shadowprices == False:
         keep_shadowprices = []
-#    # TODO move to argdefault
-#    elif keep_shadowprices is None:
-#        keep_shadowprices = ['Bus', 'Line', 'GlobalConstraint']
 
     sp = n.constraints.index
     if isinstance(keep_shadowprices, list):
@@ -649,13 +645,13 @@ def map_dual(c, attr):
         # If c is a pypsa component name the dual is stored at n.pnl(c)
        # or n.df(c).
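        # (e.g. duals of the predefined 'mu_upper' constraints of lines are
        # written to n.lines_t.mu_upper, cf. the example notebook added above)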
        # For the second case the index of the constraints has to
        # be a subset of n.df(c).index, otherwise the dual is stored at
        # n.duals[c].df
-        sign = -1 if 'lower' in attr else 1
+        sign = 1 if 'upper' in attr else -1
         constraints = get_con(n, c, attr, pop=pop)
         is_pnl = isinstance(constraints, pd.DataFrame)
-        n.dualities.at[(c, attr), 'pnl'] = is_pnl
+        n.dualvalues.at[(c, attr), 'pnl'] = is_pnl
         to_component = c in n.all_components
         if is_pnl:
-            n.dualities.at[(c, attr), 'in_comp'] = to_component
+            n.dualvalues.at[(c, attr), 'in_comp'] = to_component
             duals = constraints.stack().map(sign * constraints_dual).unstack()
             if c not in n.duals and not to_component:
                 n.duals[c] = Dict(df=pd.DataFrame(), pnl={})
             pnl = n.pnl(c) if to_component else n.duals[c].pnl
             set_from_frame(pnl, attr, duals)
         else:
             # here to_component can change
-            duals = constraints.map(constraints_dual)
+            duals = constraints.map(sign * constraints_dual)
             if to_component:
                 to_component = (duals.index.isin(n.df(c).index).all())
-            n.dualities.at[(c, attr), 'in_comp'] = to_component
+            n.dualvalues.at[(c, attr), 'in_comp'] = to_component
             if c not in n.duals and not to_component:
                 n.duals[c] = Dict(df=pd.DataFrame(), pnl={})
             df = n.df(c) if to_component else n.duals[c].df
             df[attr] = duals
 
     n.duals = Dict()
-    n.dualities = pd.DataFrame(index=sp, columns=['in_comp', 'pnl'])
+    n.dualvalues = pd.DataFrame(index=sp, columns=['in_comp', 'pnl'])
     # extract shadow prices attached to components
     for c, attr in sp:
         map_dual(c, attr)
@@ -818,7 +814,10 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
     status, termination_condition, variables_sol, constraints_dual, obj = res
 
     if termination_condition != "optimal":
+        logger.warning('Problem was not solved to optimality')
         return status, termination_condition
+    else:
+        logger.info('Optimization successful. Objective value: {:.2e}'.format(obj))
 
     if not keep_files:
         os.close(fdp); os.remove(problem_fn)
diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 3ffaef9e4..692e76f60 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -423,56 +423,56 @@ def get_con(n, c, attr, pop=False):
     return cons.pop(attr) if pop else cons[attr]
 
-def get_dual(n, name, attr=''):
+def get_sol(n, name, attr=''):
     """
-    Retrieves shadow price for a given constraint. Note that for retrieving
-    shadow prices of a custom constraint, its name has to be passed to
-    `keep_shadowprices` in the lopf, or `keep_shadowprices` has to be set to True.
-    Note that a lookup of all stored shadow prices is given in n.dualities.
+    Retrieves solution for a given variable. Note that a lookup of all stored
+    solutions is given in n.solutions.
 
+
     Parameters
     ----------
     n : pypsa.Network
     name : str
-        constraint name to which the constraint belongs
+        general variable name (or component name if variable is attached to a
+        component)
     attr: str
-        attribute name of the constraint
+        attribute name of the variable
 
     Example
     -------
     get_sol(n, 'Generator', 'p')
     """
-    pnl = n.dualities.at[(name, attr), 'pnl']
-    if n.dualities.at[(name, attr), 'in_comp']:
+    pnl = n.solutions.at[(name, attr), 'pnl']
+    if n.solutions.at[(name, attr), 'in_comp']:
         return n.pnl(name)[attr] if pnl else n.df(name)[attr + '_opt']
     else:
-        return n.duals[name].pnl[attr] if pnl else n.duals[name].df[attr]
+        return n.sols[name].pnl[attr] if pnl else n.sols[name].df[attr]
 
-def get_sol(n, name, attr=''):
+def get_dual(n, name, attr=''):
     """
-    Retrieves solution for a given variable. Note that a lookup of all stored
-    solutions is given in n.solutions.
+    Retrieves shadow price for a given constraint. 
Note that for retrieving + shadow prices of a custom constraint, its name has to be passed to + `keep_references` in the lopf, or `keep_references` has to be set to True. + Note that a lookup of all stored shadow prices is given in n.dualvalues. Parameters ---------- n : pypsa.Network c : str - general variable name (or component name if variable is attached to a - component) + constraint name to which the constraint belongs attr: str - attribute name of the variable + attribute name of the constraints Example ------- get_dual(n, 'Generator', 'mu_upper') """ - pnl = n.solutions.at[(name, attr), 'pnl'] - if n.solutions.at[(name, attr), 'in_comp']: - return n.pnl(name)[attr] if pnl else n.df(name)[attr + '_opt'] + pnl = n.dualvalues.at[(name, attr), 'pnl'] + if n.dualvalues.at[(name, attr), 'in_comp']: + return n.pnl(name)[attr] if pnl else n.df(name)[attr] else: - return n.sols[name].pnl[attr] if pnl else n.sols[name].df[attr] + return n.duals[name].pnl[attr] if pnl else n.duals[name].df[attr] # ============================================================================= From 1e23865d16a099800a1b921f911e5be609a41318 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 20 Nov 2019 17:04:05 +0100 Subject: [PATCH 093/111] linopt/linopf: correct objective sign --- pypsa/linopf.py | 2 +- pypsa/linopt.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 106c1c9fb..28265dd35 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -473,7 +473,7 @@ def define_objective(n, sns): nom_attr = nominal_attrs.items() constant = sum(n.df(c)[attr] @ n.df(c).capital_cost for c, attr in nom_attr) object_const = write_bound(n, constant, constant) - n.objective_f.write(linexpr((1, object_const), as_pandas=False)[0]) + n.objective_f.write(linexpr((-1, object_const), as_pandas=False)[0]) for c, attr in lookup.query('marginal_cost').index: cost = (get_as_dense(n, c, 'marginal_cost', sns) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 692e76f60..9a92f457f 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -566,7 +566,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, info += line info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] status = info.Status.lower().strip() - objective = float(re.sub('[^0-9\.]+', '', info.Objective)) + objective = float(re.sub('[^0-9\.\+\-]+', '', info.Objective)) termination_condition = status if termination_condition != "optimal": From c0aaf8b262da322e992407b562ed77cb813bd681 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 20 Nov 2019 19:50:25 +0100 Subject: [PATCH 094/111] linopt: rewrite _str_array to more performant --- pypsa/linopt.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 9a92f457f..4e1699615 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -298,20 +298,17 @@ def to_pandas(array, *axes): """ return pd.Series(array, *axes) if array.ndim == 1 else pd.DataFrame(array, *axes) +_to_float_str = lambda f: '%+f '%f +_v_to_float_str = np.vectorize(_to_float_str) def _str_array(array): if isinstance(array, (float, int)): - array = f'+{float(array)} ' if array >= 0 else f'{float(array)} ' - elif isinstance(array, (pd.Series, pd.DataFrame)): - array = array.values - if isinstance(array, np.ndarray): - if not (array.dtype == object) and array.size: - signs = to_pandas(array) - signs = (signs.pipe(np.sign) - .replace([0, 1, -1], ['+', '+', '-']).values) - array = signs + abs(array).astype(str) + ' 
' - return array - + return _to_float_str(array) + array = np.asarray(array) + if array.dtype < str and array.size: + return _v_to_float_str(np.asarray(array)).astype(object) + else: + return array def join_exprs(df): """ @@ -320,7 +317,6 @@ def join_exprs(df): """ return ''.join(np.asarray(df).flatten()) - # ============================================================================= # references to vars and cons, rewrite this part to not store every reference # ============================================================================= From 842ee8b7d65c4e97df0e09d38f3b13e71a49b6dc Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 20 Nov 2019 20:05:17 +0100 Subject: [PATCH 095/111] linopt set output type in _v_to_float_str --- pypsa/linopt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 4e1699615..9d304d700 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -299,14 +299,14 @@ def to_pandas(array, *axes): return pd.Series(array, *axes) if array.ndim == 1 else pd.DataFrame(array, *axes) _to_float_str = lambda f: '%+f '%f -_v_to_float_str = np.vectorize(_to_float_str) +_v_to_float_str = np.vectorize(_to_float_str, otypes=[object]) def _str_array(array): if isinstance(array, (float, int)): return _to_float_str(array) array = np.asarray(array) if array.dtype < str and array.size: - return _v_to_float_str(np.asarray(array)).astype(object) + return _v_to_float_str(np.asarray(array)) else: return array From 73d8e5c12a6dda92fedea231f036ddf5510f0150 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 21 Nov 2019 22:00:04 +0100 Subject: [PATCH 096/111] add simplified unit commitment --- pypsa/descriptors.py | 6 +-- pypsa/linopf.py | 98 ++++++++++++++++++++++++++++++------ pypsa/linopt.py | 54 +++++++++++++++++++- pypsa/stats.py | 1 + pypsa/variables.csv | 1 + test/test_unit_commitment.py | 7 +-- 6 files changed, 142 insertions(+), 25 deletions(-) diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py index b8c5effc1..96f8d0df1 100644 --- a/pypsa/descriptors.py +++ b/pypsa/descriptors.py @@ -320,16 +320,14 @@ def get_extendable_i(n, c): """ Getter function. Get the index of extendable elements of a given component. """ - return n.df(c)[lambda ds: - ds[nominal_attrs[c] + '_extendable']].index + return n.df(c)[lambda ds: ds[nominal_attrs[c] + '_extendable']].index def get_non_extendable_i(n, c): """ Getter function. Get the index of non-extendable elements of a given component. """ - return n.df(c)[lambda ds: - ~ds[nominal_attrs[c] + '_extendable']].index + return n.df(c)[lambda ds: ~ds[nominal_attrs[c] + '_extendable']].index def get_bounds_pu(n, c, sns, index=slice(None), attr=None): """ diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 28265dd35..ff1cd95d9 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -26,7 +26,7 @@ from .linopt import (linexpr, write_bound, write_constraint, set_conref, set_varref, get_con, get_var, join_exprs, run_and_read_cbc, run_and_read_gurobi, run_and_read_glpk, define_constraints, - define_variables, align_with_static_component) + define_variables, align_with_static_component, define_binaries) import pandas as pd @@ -63,7 +63,7 @@ def define_nominal_for_extendable_variables(n, c, attr): define_variables(n, lower, upper, c, attr) -def define_dispatch_for_extendable_variables(n, sns, c, attr): +def define_dispatch_for_extendable_and_committable_variables(n, sns, c, attr): """ Initializes variables for power dispatch for a given component and a given attribute. 
@@ -78,6 +78,8 @@ def define_dispatch_for_extendable_variables(n, sns, c, attr): """ ext_i = get_extendable_i(n, c) + if c == 'Generator': + ext_i = ext_i | n.generators.query('committable').index if ext_i.empty: return define_variables(n, -np.inf, np.inf, c, attr, axes=[sns, ext_i], spec='extendables') @@ -97,6 +99,8 @@ def define_dispatch_for_non_extendable_variables(n, sns, c, attr): """ fix_i = get_non_extendable_i(n, c) + if c == 'Generator': + fix_i = fix_i.difference(n.generators.query('committable').index) if fix_i.empty: return nominal_fix = n.df(c)[nominal_attrs[c]][fix_i] min_pu, max_pu = get_bounds_pu(n, c, sns, fix_i, attr) @@ -168,6 +172,39 @@ def define_fixed_variable_constraints(n, sns, c, attr, pnl=True): set_conref(n, constraints, c, f'mu_{attr}_set') +def define_generator_status_variables(n, snapshots): + com_i = n.generators.query('committable').index + ext_i = get_extendable_i(n, 'Generator') + if not (ext_i & com_i).empty: + logger.warning("The following generators have both investment optimisation" + f" and unit commitment:\n\n\t{', '.join((ext_i & com_i))}\n\nCurrently PyPSA cannot " + "do both these functions, so PyPSA is choosing investment optimisation " + "for these generators.") + com_i = com_i.difference(ext_i) + if com_i.empty: return + define_binaries(n, (snapshots, com_i), 'Generator', 'status') + + +def define_committable_generator_constraints(n, snapshots): + c, attr = 'Generator', 'status' + com_i = n.df(c).query('committable and not p_nom_extendable').index + if com_i.empty: return + nominal = n.df(c)[nominal_attrs[c]][com_i] + min_pu, max_pu = get_bounds_pu(n, c, snapshots, com_i, 'p') + lower = min_pu.mul(nominal) + upper = max_pu.mul(nominal) + + status = get_var(n, c, attr) + p = get_var(n, c, 'p')[com_i] + + lhs = linexpr((lower, status), (-1, p)) + define_constraints(n, lhs, '<=', 0, 'Generators', 'committable_lb') + + lhs = linexpr((upper, status), (-1, p)) + define_constraints(n, lhs, '>=', 0, 'Generators', 'committable_ub') + + + def define_ramp_limit_constraints(n, sns): """ Defines ramp limits for generators wiht valid ramplimit @@ -178,35 +215,57 @@ def define_ramp_limit_constraints(n, sns): rdown_i = n.df(c).query('ramp_limit_down == ramp_limit_down').index if rup_i.empty & rdown_i.empty: return + fix_i = get_non_extendable_i(n, c) + ext_i = get_extendable_i(n, c) + com_i = n.df(c).query('committable').index.difference(ext_i) p = get_var(n, c, 'p').loc[sns[1:]] p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]] # fix up - gens_i = rup_i & get_non_extendable_i(n, c) + gens_i = rup_i & fix_i lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') define_constraints(n, lhs, '<=', rhs, c, 'mu_ramp_limit_up', spec='nonext.') # ext up - gens_i = rup_i & get_extendable_i(n, c) + gens_i = rup_i & ext_i limit_pu = n.df(c)['ramp_limit_up'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (-limit_pu, p_nom)) define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='ext.') + # com up + gens_i = rup_i & com_i + limit_start = n.df(c).loc[gens_i].eval('ramp_limit_start_up * p_nom') + limit_up = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') + status = get_var(n, c, 'status').loc[sns[1:], gens_i] + status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_start - limit_up, status_prev), (- limit_start, status)) + define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', 
spec='com.') + # fix down - gens_i = rdown_i & get_non_extendable_i(n, c) + gens_i = rdown_i & fix_i lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i])) rhs = n.df(c).loc[gens_i].eval('-1 * ramp_limit_down * p_nom') define_constraints(n, lhs, '>=', rhs, c, 'mu_ramp_limit_down', spec='nonext.') # ext down - gens_i = rdown_i & get_extendable_i(n, c) + gens_i = rdown_i & ext_i limit_pu = n.df(c)['ramp_limit_down'][gens_i] p_nom = get_var(n, c, 'p_nom')[gens_i] lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), (limit_pu, p_nom)) define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='ext.') + # com down + gens_i = rdown_i & com_i + limit_shut = n.df(c).loc[gens_i].eval('ramp_limit_shut_down * p_nom') + limit_down = n.df(c).loc[gens_i].eval('ramp_limit_down * p_nom') + status = get_var(n, c, 'status').loc[sns[1:], gens_i] + status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_down - limit_shut, status), (limit_shut, status_prev)) + define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='com.') def define_nodal_balance_constraints(n, sns): """ @@ -501,7 +560,7 @@ def prepare_lopf(n, snapshots=None, keep_files=False, the lp file """ - n._xCounter, n._cCounter = 0, 0 + n._xCounter, n._cCounter = 1, 1 n.vars, n.cons = Dict(), Dict() cols = ['component', 'name', 'pnl', 'specification'] @@ -514,31 +573,35 @@ def prepare_lopf(n, snapshots=None, keep_files=False, fdo, objective_fn = mkstemp(prefix='pypsa-objectve-', suffix='.txt', text=True) fdc, constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True) fdb, bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True) + fdi, binaries_fn = mkstemp(prefix='pypsa-binaries-', suffix='.txt', text=True) fdp, problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True) n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') n.bounds_f = open(bounds_fn, mode='w') + n.binaries_f = open(binaries_fn, mode='w') n.objective_f.write('\* LOPF *\n\nmin\nobj:\n') n.constraints_f.write("\n\ns.t.\n\n") n.bounds_f.write("\nbounds\n") - + n.binaries_f.write("\nbinary\n") for c, attr in lookup.query('nominal and not handle_separately').index: define_nominal_for_extendable_variables(n, c, attr) # define_fixed_variable_constraints(n, snapshots, c, attr, pnl=False) for c, attr in lookup.query('not nominal and not handle_separately').index: define_dispatch_for_non_extendable_variables(n, snapshots, c, attr) - define_dispatch_for_extendable_variables(n, snapshots, c, attr) + define_dispatch_for_extendable_and_committable_variables(n, snapshots, c, attr) align_with_static_component(n, c, attr) define_dispatch_for_extendable_constraints(n, snapshots, c, attr) # define_fixed_variable_constraints(n, snapshots, c, attr) + define_generator_status_variables(n, snapshots) # consider only state_of_charge_set for the moment define_fixed_variable_constraints(n, snapshots, 'StorageUnit', 'state_of_charge') define_fixed_variable_constraints(n, snapshots, 'Store', 'e') + define_committable_generator_constraints(n, snapshots) define_ramp_limit_constraints(n, snapshots) define_storage_unit_constraints(n, snapshots) define_store_constraints(n, snapshots) @@ -550,15 +613,16 @@ def prepare_lopf(n, snapshots=None, keep_files=False, if extra_functionality is not None: extra_functionality(n, snapshots) - n.bounds_f.write("end\n") + n.binaries_f.write("end\n") # explicit closing with file descriptor is necessary for windows 
machines
-    for f, fd in (('bounds_f', fdb), ('constraints_f', fdc), ('objective_f', fdo)):
+    for f, fd in (('bounds_f', fdb), ('constraints_f', fdc),
+                  ('objective_f', fdo), ('binaries_f', fdi)):
         getattr(n, f).close(); delattr(n, f); os.close(fd)
 
     # concate files
     with open(problem_fn, 'wb') as wfd:
-        for f in [objective_fn, constraints_fn, bounds_fn]:
+        for f in [objective_fn, constraints_fn, bounds_fn, binaries_fn]:
             with open(f,'rb') as fd:
                 shutil.copyfileobj(fd, wfd)
     if not keep_files:
@@ -620,7 +684,7 @@ def map_solution(c, attr):
     n.sols = Dict()
     n.solutions = pd.DataFrame(index=n.variables.index, columns=['in_comp', 'pnl'])
 
-    for c, attr in n.variables.index.intersection(lookup.index):
+    for c, attr in n.variables.index:
         map_solution(c, attr)
 
     # if nominal capcity was no variable set optimal value to nominal
@@ -787,9 +851,11 @@ def network_lopf(n, snapshots=None, solver_name="cbc",
         raise NotImplementedError("Only the kirchhoff formulation is supported")
 
     if n.generators.committable.any():
-        logger.warn("Unit commitment is not yet implemented for optimisation "
-            "without using pyomo. The following generators will be treated as "
-            f"non-committables:\n{list(n.generators.query('committable').index)}")
+        logger.warn("Unit commitment is not yet completely implemented for "
+                    "optimisation without using pyomo. Thus minimum up time, "
+                    "minimum down time, start up costs, shut down costs and "
+                    "case of ramping if not at start of network.snapshots"
+                    "will be ignored.")
 
     #disable logging because multiple slack bus calculations, keep output clean
     snapshots = _as_snapshots(n, snapshots)
diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 9d304d700..5b2ae80fd 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -77,6 +77,37 @@ def define_variables(n, lower, upper, name, attr='', axes=None, spec=''):
     set_varref(n, var, name, attr, spec=spec)
 
 
+def define_binaries(n, axes, name, attr='', spec=''):
+    """
+    Defines binary variable(s) for the pypsa network. The variables are stored
+    in the network object under n.vars with key of the variable name.
+    For each entry of the pd.Series or pd.DataFrame spanned by the axes
+    argument the function defines a binary.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    axes : pd.Index or tuple of pd.Index objects
+        Specifies the axes and therefore the shape of the variables.
+    name : str
+        general name of the variable (or component which the variable is
+        referring to). The variable will then be stored under:
+        * n.vars[name].pnl if the variable is two-dimensional
+        * n.vars[name].df if the variable is one-dimensional
+    attr : str default ''
+        Specifying name of the variable, defines under which name the variable(s)
+        are stored in n.vars[name].pnl if two-dimensional or in n.vars[name].df
+        if one-dimensional
+
+    See also
+    --------
+    define_variables
+
+    """
+    var = write_binary(n, axes)
+    set_varref(n, var, name, attr, spec=spec)
+
+
 def define_constraints(n, lhs, sense, rhs, name, attr='', axes=None, spec=''):
     """
     Defines constraint(s) for pypsa-network with given left hand side (lhs),
@@ -184,9 +215,22 @@ def write_constraint(n, lhs, sense, rhs, axes=None):
     if isinstance(sense, str):
         sense = '=' if sense == '==' else sense
     lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs)
-    n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + '\n' + rhs + '\n\n'))
+    n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + ' ' + rhs + '\n\n'))
     return to_pandas(cons, *axes)
 
+def write_binary(n, axes):
+    """
+    Writer function for writing out multiple binary variables at a time.
+    According to the axes it writes out binaries for each entry of the pd.Series
+    or pd.DataFrame spanned by axes. Returns a series or frame with variable
+    references.
+    """
+    axes, shape, length = _get_handlers(axes)
+    n._xCounter += length
+    variables = np.array([f'x{x}' for x in range(n._xCounter - length,
+                          n._xCounter)], dtype=object).reshape(shape)
+    n.binaries_f.write(join_exprs(variables + '\n'))
+    return to_pandas(variables, *axes)
 
 # =============================================================================
 # helpers, helper functions
@@ -512,8 +556,10 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile,
         objective = float(data[len("Optimal - objective value "):])
     elif "Infeasible" in data:
         termination_condition = "infeasible"
+        status = 'infeasible'
     else:
         termination_condition = "other"
+        status = 'other'
 
     if termination_condition != "optimal":
         return status, termination_condition, None, None, None
@@ -625,7 +671,11 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile,
         return status, termination_condition, None, None, None
 
     variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()})
-    constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()})
+    try:
+        constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()})
+    except AttributeError:
+        logger.warning("Shadow prices of MILP couldn't be parsed")
+        constraints_dual = pd.Series(index=[c.ConstrName for c in m.getConstrs()])
     termination_condition = status
     objective = m.ObjVal
     del m
diff --git a/pypsa/stats.py b/pypsa/stats.py
index 388e20771..a346aeb7d 100644
--- a/pypsa/stats.py
+++ b/pypsa/stats.py
@@ -208,6 +208,7 @@ def check_constraints(n, tol=1e-3):
 
     Returns AssertionError if tolerance is exceeded.
""" + n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) stats = constraint_stats(n).rename(index=str.title) condition = stats.T[['Min', 'Max']].query('Min < -@tol | Max > @tol').T assert condition.empty, (f'The following constraint(s) are exceeding the ' diff --git a/pypsa/variables.csv b/pypsa/variables.csv index 37873372d..673b5dcf3 100644 --- a/pypsa/variables.csv +++ b/pypsa/variables.csv @@ -1,5 +1,6 @@ component,variable,marginal_cost,nominal,handle_separately Generator,p,True,False,False +Generator,status,False,False,True Generator,p_nom,False,True,False Line,s,False,False,False Line,s_nom,False,True,False diff --git a/test/test_unit_commitment.py b/test/test_unit_commitment.py index 3639b3c24..778c4d501 100644 --- a/test/test_unit_commitment.py +++ b/test/test_unit_commitment.py @@ -8,7 +8,7 @@ -def test_part_load(): +def test_part_load(pyomo=True): """This test is based on https://pypsa.org/examples/unit-commitment.html and is not very comprehensive.""" @@ -38,7 +38,7 @@ def test_part_load(): solver_name = "glpk" - nu.lopf(nu.snapshots,solver_name=solver_name) + nu.lopf(nu.snapshots,solver_name=solver_name, pyomo=pyomo) expected_status = np.array([[1,1,1,0],[0,0,0,1]],dtype=float).T @@ -138,4 +138,5 @@ def test_minimum_down_time(): if __name__ == "__main__": test_minimum_down_time() test_minimum_up_time() - test_part_load() + test_part_load(pyomo=True) + test_part_load(pyomo=False) From 5bdbcd26fe7b941a93a469f014ab1bb4040f89ce Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 21 Nov 2019 22:17:53 +0100 Subject: [PATCH 097/111] linopf correct warning string --- pypsa/linopf.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index ff1cd95d9..4e7aec40e 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -852,10 +852,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", if n.generators.committable.any(): logger.warn("Unit commitment is not yet completely implemented for " - "optimisation without using pyomo. Thus minimum up time, " - "minimum down time, start up costs, shut down costs and " - "case of ramping if not at start of network.snapshots" - "will be ignored.") + "optimising without pyomo. 
Thus minimum up time, minimum down time, " + "start up costs, shut down costs will be ignored.") #disable logging because multiple slack bus calculations, keep output clean snapshots = _as_snapshots(n, snapshots) From c568ec8777b0fd491097ee9a20a6be0fa6d0b5d5 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 21 Nov 2019 23:11:24 +0100 Subject: [PATCH 098/111] linopf: omit ramp constraints for committables if not any there --- pypsa/linopf.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 4e7aec40e..703987bf5 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -236,13 +236,14 @@ def define_ramp_limit_constraints(n, sns): # com up gens_i = rup_i & com_i - limit_start = n.df(c).loc[gens_i].eval('ramp_limit_start_up * p_nom') - limit_up = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') - status = get_var(n, c, 'status').loc[sns[1:], gens_i] - status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - (limit_start - limit_up, status_prev), (- limit_start, status)) - define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='com.') + if not gens_i.empty: + limit_start = n.df(c).loc[gens_i].eval('ramp_limit_start_up * p_nom') + limit_up = n.df(c).loc[gens_i].eval('ramp_limit_up * p_nom') + status = get_var(n, c, 'status').loc[sns[1:], gens_i] + status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_start - limit_up, status_prev), (- limit_start, status)) + define_constraints(n, lhs, '<=', 0, c, 'mu_ramp_limit_up', spec='com.') # fix down gens_i = rdown_i & fix_i @@ -259,13 +260,14 @@ def define_ramp_limit_constraints(n, sns): # com down gens_i = rdown_i & com_i - limit_shut = n.df(c).loc[gens_i].eval('ramp_limit_shut_down * p_nom') - limit_down = n.df(c).loc[gens_i].eval('ramp_limit_down * p_nom') - status = get_var(n, c, 'status').loc[sns[1:], gens_i] - status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] - lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), - (limit_down - limit_shut, status), (limit_shut, status_prev)) - define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='com.') + if not gens_i.empty: + limit_shut = n.df(c).loc[gens_i].eval('ramp_limit_shut_down * p_nom') + limit_down = n.df(c).loc[gens_i].eval('ramp_limit_down * p_nom') + status = get_var(n, c, 'status').loc[sns[1:], gens_i] + status_prev = get_var(n, c, 'status').shift(1).loc[sns[1:], gens_i] + lhs = linexpr((1, p[gens_i]), (-1, p_prev[gens_i]), + (limit_down - limit_shut, status), (limit_shut, status_prev)) + define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', spec='com.') def define_nodal_balance_constraints(n, sns): """ From 0358473b4f3302e56a40a7a7a8929d812ba58c53 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 22 Nov 2019 12:43:06 +0100 Subject: [PATCH 099/111] doc update --- doc/optimal_power_flow.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 5a412c640..caa444a67 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -56,7 +56,7 @@ for more details). -.. important:: Since version v0.15, PyPSA enables the optimisation without the use of `pyomo `_. This make the ``lopf`` function much more efficient in terms of memory usage and time. 
For this purpose two new module were introduced, ``pypsa.linopf`` and ``pypsa.linopt`` wich mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo.
+.. important:: Since version v0.15.1, PyPSA enables the optimisation without the use of `pyomo `_. This makes the ``lopf`` function much more efficient in terms of memory usage and time. For this purpose two new modules were introduced, ``pypsa.linopf`` and ``pypsa.linopt``, which mainly reflect the functionality of ``pypsa.opf`` and ``pypsa.opt`` but without using pyomo. Note that when setting pyomo to False, the ``extra_functionality`` has to be adapted to the appropriate syntax.
 
 .. warning:: If the transmission capacity is changed in passive networks, then the impedance will also change (i.e. if parallel lines are installed). This is NOT reflected in the ordinary LOPF, however ``pypsa.linopf.ilopf`` covers this through an iterative process as done `in here `_.
 
@@ -207,7 +207,7 @@ Generator unit commitment constraints
 
 These are defined in ``pypsa.opf.define_generator_variables_constraints(network,snapshots)``.
 
-.. important:: Unit commitment constraints will only be build if pyomo is set to True
+.. important:: Unit commitment constraints will only be built fully if pyomo is set to True. If pyomo is set to False a simplified version of the unit commitment is calculated by ignoring the parameters `min_up_time`, `min_down_time`, `start_up_cost`, `shut_down_cost`, `up_time_before` and `down_time_before`.
 
 The implementation is a complete implementation of the unit commitment constraints defined in Chapter 4.3 of `Convex Optimization of Power Systems `_ by Joshua Adam Taylor (CUP, 2015).
 

From 056af017dbccad95c2b925c51847d28c1dd2f733 Mon Sep 17 00:00:00 2001
From: Fabian 
Date: Fri, 22 Nov 2019 12:44:29 +0100
Subject: [PATCH 100/111] doc update II

---
 pypsa/linopt.py          | 3 ++-
 test/test_opf_storage.py | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index 5b2ae80fd..202e71423 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -45,6 +45,7 @@ def define_variables(n, lower, upper, name, attr='', axes=None, spec=''):
         referring to). The variable will then be stored under:
         * n.vars[name].pnl if the variable is two-dimensional
         * n.vars[name].df if the variable is one-dimensional
+        but can easily be accessed with :func:`get_var(n, name, attr)`
     attr : str default ''
         Specifying name of the variable, defines under which name the variable(s)
         are stored in n.vars[name].pnl if two-dimensional or in n.vars[name].df
@@ -113,7 +114,7 @@ def define_constraints(n, lhs, sense, rhs, name, attr='', axes=None, spec=''):
     Defines constraint(s) for pypsa-network with given left hand side (lhs),
     sense and right hand side (rhs). The constraints are stored in the network
     object under n.cons with key of the constraint name. If multiple constraints
-    are defined at ones only using np.arrays then the axes argument can be used
+    are defined at once, only using np.arrays, then the axes argument can be used
     for defining the axes for the constraints (this is espececially recommended
     for time-dependent constraints). If one of lhs, sense and rhs is a pd.Series/pd.DataFrame the axes argument is not necessary.
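
A minimal sketch of the adapted ``extra_functionality`` syntax referred to above, built on the helpers this series adds to ``pypsa.linopt`` (the generator names 'gas' and 'coal' and the 100 MW cap are made-up placeholders, not part of these patches):

    from pypsa.linopt import get_var, linexpr, define_constraints

    def extra_functionality(n, snapshots):
        # per-snapshot references to the dispatch variables of all generators
        p = get_var(n, 'Generator', 'p')
        # LP expression strings: 1.0 * p_gas + 1.0 * p_coal in each snapshot
        lhs = linexpr((1, p['gas']), (1, p['coal']))
        # joint dispatch of both plants may not exceed 100 MW in any snapshot
        define_constraints(n, lhs, '<=', 100, 'Generator', 'joint_dispatch_limit')

    n.lopf(pyomo=False, extra_functionality=extra_functionality)
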
diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index c554199c8..00b4d8932 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -8,7 +8,7 @@ solvers = ['glpk'] if sys.platform == 'win32' else ['cbc', 'glpk'] -def test_opf(pyomo=True): +def test_opf(): csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples", "opf-storage-hvdc","opf-storage-data") From a3ef8a6147c411654bccb28f8e22b13ec9c1fe3e Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 25 Nov 2019 13:17:42 +0100 Subject: [PATCH 101/111] add solver_dir argument for specifying io directory for solving (default set by tempfile) correct objective constant for only extendables --- pypsa/components.py | 4 ++++ pypsa/linopf.py | 47 +++++++++++++++++++++++++++------------------ pypsa/linopt.py | 19 ++++++++++-------- 3 files changed, 43 insertions(+), 27 deletions(-) diff --git a/pypsa/components.py b/pypsa/components.py index 4b7ec8f86..3e1a8b579 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -485,6 +485,10 @@ def lopf(self, snapshots=None, pyomo=True, solver_name="glpk", names. Defaults to ['Bus', 'Line', 'GlobalConstraint']. After solving, the shadow prices can be retrieved using :func:`pypsa.linopt.get_dual` with corresponding name + solver_dir : str, default None + Only taking effect when pyomo is False. + Path to directory where necessary files are written, default None leads + to the default temporary directory used by tempfile.mkstemp(). """ args = {'snapshots': snapshots, 'keep_files': keep_files, diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 703987bf5..7e90b5c9b 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -532,7 +532,10 @@ def define_objective(n, sns): """ # constant for already done investment nom_attr = nominal_attrs.items() - constant = sum(n.df(c)[attr] @ n.df(c).capital_cost for c, attr in nom_attr) + constant = 0 + for c, attr in nom_attr: + ext_i = get_extendable_i(n, c) + constant += n.df(c)[attr][ext_i] @ n.df(c).capital_cost[ext_i] object_const = write_bound(n, constant, constant) n.objective_f.write(linexpr((-1, object_const), as_pandas=False)[0]) @@ -552,7 +555,7 @@ def define_objective(n, sns): def prepare_lopf(n, snapshots=None, keep_files=False, - extra_functionality=None): + extra_functionality=None, solver_dir=None): """ Sets up the linear problem and writes it out to a lp file @@ -572,11 +575,13 @@ def prepare_lopf(n, snapshots=None, keep_files=False, snapshots = n.snapshots if snapshots is None else snapshots start = time.time() - fdo, objective_fn = mkstemp(prefix='pypsa-objectve-', suffix='.txt', text=True) - fdc, constraints_fn = mkstemp(prefix='pypsa-constraints-', suffix='.txt', text=True) - fdb, bounds_fn = mkstemp(prefix='pypsa-bounds-', suffix='.txt', text=True) - fdi, binaries_fn = mkstemp(prefix='pypsa-binaries-', suffix='.txt', text=True) - fdp, problem_fn = mkstemp(prefix='pypsa-problem-', suffix='.lp', text=True) + tmpkwargs = dict(text=True, dir=solver_dir) + # mkstemp(suffix, prefix, **tmpkwargs) + fdo, objective_fn = mkstemp('.txt', 'pypsa-objectve-', **tmpkwargs) + fdc, constraints_fn = mkstemp('.txt', 'pypsa-constraints-', **tmpkwargs) + fdb, bounds_fn = mkstemp('.txt', 'pypsa-bounds-', **tmpkwargs) + fdi, binaries_fn = mkstemp('.txt', 'pypsa-binaries-', **tmpkwargs) + fdp, problem_fn = mkstemp('.lp', 'pypsa-problem-', **tmpkwargs) n.objective_f = open(objective_fn, mode='w') n.constraints_f = open(constraints_fn, mode='w') @@ -786,7 +791,8 @@ def network_lopf(n, snapshots=None, solver_name="cbc", 
extra_postprocessing=None, formulation="kirchhoff", keep_references=False, keep_files=False, keep_shadowprices=['Bus', 'Line', 'GlobalConstraint'], - solver_options=None, warmstart=False, store_basis=True): + solver_options=None, warmstart=False, store_basis=False, + solver_dir=None): """ Linear optimal power flow for a group of snapshots. @@ -806,6 +812,9 @@ def network_lopf(n, snapshots=None, solver_name="cbc", solver_options : dictionary A dictionary with additional options that get passed to the solver. (e.g. {'threads':2} tells gurobi to use only 2 cpus) + solver_dir : str, default None + Path to directory where necessary files are written, default None leads + to the default temporary directory used by tempfile.mkstemp(). keep_files : bool, default False Keep the files that pyomo constructs from OPF problem construction, e.g. .lp file - useful for debugging @@ -828,7 +837,7 @@ def network_lopf(n, snapshots=None, solver_name="cbc", Use this to warmstart the optimization. Pass a string which gives the path to the basis file. If set to True, a path to a basis file must be given in network.basis_fn. - store_basis : bool, default True + store_basis : bool, default False Whether to store the basis of the optimization results. If True, the path to the basis file is saved in network.basis_fn. Note that a basis can only be stored if simplex, dual-simplex, or barrier @@ -863,32 +872,31 @@ def network_lopf(n, snapshots=None, solver_name="cbc", n.determine_network_topology() logger.info("Prepare linear problem") - fdp, problem_fn = prepare_lopf(n, snapshots, keep_files, extra_functionality) - fds, solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol') - if solver_logfile is None: - fdl, solver_logfile = mkstemp(prefix='pypsa-solve', suffix='.log') + fdp, problem_fn = prepare_lopf(n, snapshots, keep_files, + extra_functionality, solver_dir) + fds, solution_fn = mkstemp(prefix='pypsa-solve', suffix='.sol', dir=solver_dir) if warmstart == True: warmstart = n.basis_fn logger.info("Solve linear problem using warmstart") else: - logger.info("Solve linear problem") + logger.info(f"Solve linear problem using {solver_name.title()} solver") solve = eval(f'run_and_read_{solver_name}') res = solve(n, problem_fn, solution_fn, solver_logfile, solver_options, keep_files, warmstart, store_basis) status, termination_condition, variables_sol, constraints_dual, obj = res - if termination_condition != "optimal": + if not keep_files: + os.close(fdp); os.remove(problem_fn) + os.close(fds); os.remove(solution_fn) + + if "optimal" not in termination_condition: logger.warning('Problem was not solved to optimality') return status, termination_condition else: logger.info('Optimization successful. 
Objective value: {:.2e}'.format(obj)) - if not keep_files: - os.close(fdp); os.remove(problem_fn) - os.close(fds); os.remove(solution_fn) - n.objective = obj assign_solution(n, snapshots, variables_sol, constraints_dual, keep_references=keep_references, @@ -952,6 +960,7 @@ def msq_diff(n, s_nom_prev): return lines_err iteration = 0 + kwargs['store_basis'] = True diff = msq_threshold while diff >= msq_threshold or iteration < min_iterations: if iteration >= max_iterations: diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 202e71423..e85e823b5 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -541,10 +541,8 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, n.basis_fn = solution_fn.replace('.sol', '.bas') command += f'-basisO {n.basis_fn} ' - if solver_logfile is None: - os.system(command) - else: - result = subprocess.run(command.split(' '), stdout=subprocess.PIPE) + result = subprocess.run(command.split(' '), stdout=subprocess.PIPE) + if solver_logfile is not None: print(result.stdout.decode('utf-8'), file=open(solver_logfile, 'w')) f = open(solution_fn,"r") @@ -598,7 +596,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, if (solver_options is not None) and (solver_options != {}): command += solver_options - os.system(command) + subprocess.run(command.split(' '), stdout=subprocess.PIPE) f = open(solution_fn) info = '' @@ -612,7 +610,7 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, objective = float(re.sub('[^0-9\.\+\-]+', '', info.Objective)) termination_condition = status - if termination_condition != "optimal": + if 'optimal' not in termination_condition: return status, termination_condition, None, None, None sol = pd.read_fwf(f).set_index('Row name') @@ -620,8 +618,13 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, variables_sol = sol[variables_b]['Activity'].astype(float) sol = sol[~variables_b] constraints_b = sol.index.str[0] == 'c' - constraints_dual = (pd.to_numeric(sol[constraints_b]['Marginal'], 'coerce') - .fillna(0)) + try: + constraints_dual = pd.to_numeric(sol[constraints_b]['Marginal'], + 'coerce').fillna(0) + except KeyError: + logger.warning("Shadow prices of MILP couldn't be parsed") + constraints_dual = pd.Series(index=sol.index[constraints_b]) + f.close() return (status, termination_condition, variables_sol, From 69c9880a760dc7e2f485d541ebea01f9eedfe0d1 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 26 Nov 2019 15:58:22 +0100 Subject: [PATCH 102/111] linopf: again, fix dual sign (a general rule is still unlclear) --- pypsa/linopf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 7e90b5c9b..b9393391d 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -716,9 +716,10 @@ def map_dual(c, attr): # or n.df(c). 
For the second case the index of the constraints have to # be a subset of n.df(c).index otherwise the dual is stored at # n.duals[c].df - sign = 1 if 'upper' in attr else -1 constraints = get_con(n, c, attr, pop=pop) is_pnl = isinstance(constraints, pd.DataFrame) + # TODO: setting the sign is not very clear + sign = 1 if 'upper' in attr or attr == 'marginal_price' else -1 n.dualvalues.at[(c, attr), 'pnl'] = is_pnl to_component = c in n.all_components if is_pnl: From f0e533bc55e4ededbf4595ec2cf7ae7cd3dafc0e Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 26 Nov 2019 17:54:58 +0100 Subject: [PATCH 103/111] store integer references instead of strings --- pypsa/linopt.py | 43 ++++++++++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index e85e823b5..b9182863c 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -196,10 +196,10 @@ def write_bound(n, lower, upper, axes=None): """ axes, shape, length = _get_handlers(axes, lower, upper) n._xCounter += length - variables = np.array([f'x{x}' for x in range(n._xCounter - length, n._xCounter)], - dtype=object).reshape(shape) + variables = np.arange(n._xCounter - length, n._xCounter).reshape(shape) lower, upper = _str_array(lower), _str_array(upper) - n.bounds_f.write(join_exprs(lower + ' <= '+ variables + ' <= '+ upper + '\n')) + n.bounds_f.write(join_exprs(lower + ' <= x' + _str_array(variables, True) + + ' <= '+ upper + '\n')) return to_pandas(variables, *axes) def write_constraint(n, lhs, sense, rhs, axes=None): @@ -211,12 +211,12 @@ def write_constraint(n, lhs, sense, rhs, axes=None): """ axes, shape, length = _get_handlers(axes, lhs, sense, rhs) n._cCounter += length - cons = np.array([f'c{x}' for x in range(n._cCounter - length, n._cCounter)], - dtype=object).reshape(shape) + cons = np.arange(n._cCounter - length, n._cCounter).reshape(shape) if isinstance(sense, str): sense = '=' if sense == '==' else sense lhs, sense, rhs = _str_array(lhs), _str_array(sense), _str_array(rhs) - n.constraints_f.write(join_exprs(cons + ':\n' + lhs + sense + ' ' + rhs + '\n\n')) + n.constraints_f.write(join_exprs('c' + _str_array(cons, True) + ':\n' + + lhs + sense + ' ' + rhs + '\n\n')) return to_pandas(cons, *axes) def write_binary(n, axes): @@ -328,7 +328,7 @@ def linexpr(*tuples, as_pandas=True, return_axes=False): expr = np.repeat('', np.prod(shape)).reshape(shape).astype(object) if np.prod(shape): for coeff, var in tuples: - expr = expr + _str_array(coeff) + _str_array(var) + '\n' + expr = expr + _str_array(coeff) + ' x' + _str_array(var, True) + '\n' if return_axes: return (expr, *axes) if as_pandas: @@ -343,14 +343,21 @@ def to_pandas(array, *axes): """ return pd.Series(array, *axes) if array.ndim == 1 else pd.DataFrame(array, *axes) -_to_float_str = lambda f: '%+f '%f +_to_float_str = lambda f: '%+f'%f _v_to_float_str = np.vectorize(_to_float_str, otypes=[object]) -def _str_array(array): +_to_int_str = lambda d: '%d'%d +_v_to_int_str = np.vectorize(_to_int_str, otypes=[object]) + +def _str_array(array, integer_string=False): if isinstance(array, (float, int)): + if integer_string: + return _to_int_str(array) return _to_float_str(array) array = np.asarray(array) if array.dtype < str and array.size: + if integer_string: + return _v_to_int_str(np.asarray(array)) return _v_to_float_str(np.asarray(array)) else: return array @@ -520,6 +527,10 @@ def get_dual(n, name, attr=''): # solvers # ============================================================================= +def 
set_int_index(ser): + ser.index = ser.index.str[1:].astype(int) + return ser + def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, solver_options, keep_files, warmstart=None, store_basis=True): @@ -566,8 +577,8 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, sol = pd.read_csv(solution_fn, header=None, skiprows=[0], sep=r'\s+', usecols=[1,2,3], index_col=0) variables_b = sol.index.str[0] == 'x' - variables_sol = sol[variables_b][2] - constraints_dual = sol[~variables_b][3] + variables_sol = sol[variables_b][2].pipe(set_int_index) + constraints_dual = sol[~variables_b][3].pipe(set_int_index) return (status, termination_condition, variables_sol, constraints_dual, objective) @@ -615,12 +626,12 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, sol = pd.read_fwf(f).set_index('Row name') variables_b = sol.index.str[0] == 'x' - variables_sol = sol[variables_b]['Activity'].astype(float) + variables_sol = sol[variables_b]['Activity'].astype(float).pipe(set_int_index) sol = sol[~variables_b] constraints_b = sol.index.str[0] == 'c' try: constraints_dual = pd.to_numeric(sol[constraints_b]['Marginal'], - 'coerce').fillna(0) + 'coerce').fillna(0).pipe(set_int_index) except KeyError: logger.warning("Shadow prices of MILP couldn't be parsed") constraints_dual = pd.Series(index=sol.index[constraints_b]) @@ -674,9 +685,11 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, if termination_condition != "optimal": return status, termination_condition, None, None, None - variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()}) + variables_sol = pd.Series({v.VarName: v.x for v + in m.getVars()}).pipe(set_int_index) try: - constraints_dual = pd.Series({c.ConstrName: c.Pi for c in m.getConstrs()}) + constraints_dual = pd.Series({c.ConstrName: c.Pi for c in + m.getConstrs()}).pipe(set_int_index) except AttributeError: logger.warning("Shadow prices of MILP couldn't be parsed") constraints_dual = pd.Series(index=[c.ConstrName for c in m.getConstrs()]) From cbe6e81f73d6f36cd301261c515d5aa4dbfb802d Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 26 Nov 2019 19:35:30 +0100 Subject: [PATCH 104/111] clean lp file writing a bit --- pypsa/linopf.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index b9393391d..593df321f 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -319,7 +319,7 @@ def define_kirchhoff_constraints(n, sns): def cycle_flow(ds): ds = ds[lambda ds: ds!=0.].dropna() - vals = linexpr((ds, branch_vars[ds.index]), as_pandas=False) + '\n' + vals = linexpr((ds, branch_vars[ds.index]), as_pandas=False) return vals.sum(1) constraints = [] @@ -499,7 +499,7 @@ def define_global_constraints(n, sns): if ext_i.empty: continue v = linexpr((n.df(c).length[ext_i], get_var(n, c, attr)[ext_i]), as_pandas=False) - lhs += join_exprs(v) + '\n' + lhs += '\n' + join_exprs(v) if lhs == '': continue sense = glc.sense rhs = glc.constant @@ -517,7 +517,7 @@ def define_global_constraints(n, sns): if ext_i.empty: continue v = linexpr((n.df(c).capital_cost[ext_i], get_var(n, c, attr)[ext_i]), as_pandas=False) - lhs += join_exprs(v) + '\n' + lhs += '\n' + join_exprs(v) if lhs == '': continue sense = glc.sense rhs = glc.constant @@ -550,7 +550,7 @@ def define_objective(n, sns): for c, attr in nominal_attrs.items(): cost = n.df(c)['capital_cost'][get_extendable_i(n, c)] if cost.empty: continue - terms = linexpr((cost, get_var(n, c, attr)[cost.index])) + '\n' + terms = linexpr((cost, get_var(n, c, 
attr)[cost.index])) n.objective_f.write(join_exprs(terms)) From 49914577459707f71c5e7c3eb9a4cdd8f846b3d8 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 26 Nov 2019 21:03:17 +0100 Subject: [PATCH 105/111] linopt: fix reading solution for glpk --- pypsa/linopt.py | 43 ++++++++++++++++--------------- test/test_unit_commitment.py | 49 +++++++++++++++++++++++++++++++++--- 2 files changed, 68 insertions(+), 24 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index b9182863c..6f4d727a9 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -195,6 +195,7 @@ def write_bound(n, lower, upper, axes=None): Return a series or frame with variable references. """ axes, shape, length = _get_handlers(axes, lower, upper) + if not length: return pd.Series() n._xCounter += length variables = np.arange(n._xCounter - length, n._xCounter).reshape(shape) lower, upper = _str_array(lower), _str_array(upper) @@ -210,6 +211,7 @@ def write_constraint(n, lhs, sense, rhs, axes=None): Return a series or frame with constraint references. """ axes, shape, length = _get_handlers(axes, lhs, sense, rhs) + if not length: return pd.Series() n._cCounter += length cons = np.arange(n._cCounter - length, n._cCounter).reshape(shape) if isinstance(sense, str): @@ -228,9 +230,8 @@ def write_binary(n, axes): """ axes, shape, length = _get_handlers(axes) n._xCounter += length - variables = np.array([f'x{x}' for x in range(n._xCounter - length, - n._xCounter)], dtype=object).reshape(shape) - n.binaries_f.write(join_exprs(variables + '\n')) + variables = np.arange(n._xCounter - length, n._xCounter).reshape(shape) + n.binaries_f.write(join_exprs('x' + _str_array(variables, True) + '\n')) return to_pandas(variables, *axes) # ============================================================================= @@ -610,13 +611,15 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, subprocess.run(command.split(' '), stdout=subprocess.PIPE) f = open(solution_fn) - info = '' - linebreak = False - while not linebreak: - line = f.readline() - linebreak = line == '\n' - info += line - info = pd.read_csv(io.StringIO(info), sep=':', index_col=0, header=None)[1] + def read_until_break(f): + linebreak = False + while not linebreak: + line = f.readline() + linebreak = line == '\n' + yield line + + info = io.StringIO(''.join(read_until_break(f))[:-2]) + info = pd.read_csv(info, sep=':', index_col=0, header=None)[1] status = info.Status.lower().strip() objective = float(re.sub('[^0-9\.\+\-]+', '', info.Objective)) termination_condition = status @@ -624,18 +627,18 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile, if 'optimal' not in termination_condition: return status, termination_condition, None, None, None - sol = pd.read_fwf(f).set_index('Row name') - variables_b = sol.index.str[0] == 'x' - variables_sol = sol[variables_b]['Activity'].astype(float).pipe(set_int_index) - sol = sol[~variables_b] - constraints_b = sol.index.str[0] == 'c' - try: - constraints_dual = pd.to_numeric(sol[constraints_b]['Marginal'], - 'coerce').fillna(0).pipe(set_int_index) - except KeyError: + duals = io.StringIO(''.join(read_until_break(f))[:-2]) + duals = pd.read_fwf(duals)[1:].set_index('Row name') + if 'Marginal' in duals: + constraints_dual = pd.to_numeric(duals['Marginal'], 'coerce')\ + .fillna(0).pipe(set_int_index) + else: logger.warning("Shadow prices of MILP couldn't be parsed") - constraints_dual = pd.Series(index=sol.index[constraints_b]) + constraints_dual = pd.Series(index=duals.index) + sol = 
io.StringIO(''.join(read_until_break(f))[:-2]) + variables_sol = (pd.read_fwf(sol)[1:].set_index('Column name') + ['Activity'].astype(float).pipe(set_int_index)) f.close() return (status, termination_condition, variables_sol, diff --git a/test/test_unit_commitment.py b/test/test_unit_commitment.py index 778c4d501..9b0c0f4ad 100644 --- a/test/test_unit_commitment.py +++ b/test/test_unit_commitment.py @@ -8,7 +8,7 @@ -def test_part_load(pyomo=True): +def test_part_load(): """This test is based on https://pypsa.org/examples/unit-commitment.html and is not very comprehensive.""" @@ -38,7 +38,48 @@ def test_part_load(pyomo=True): solver_name = "glpk" - nu.lopf(nu.snapshots,solver_name=solver_name, pyomo=pyomo) + nu.lopf(nu.snapshots,solver_name=solver_name) + + expected_status = np.array([[1,1,1,0],[0,0,0,1]],dtype=float).T + + np.testing.assert_array_almost_equal(nu.generators_t.status.values,expected_status) + + expected_dispatch = np.array([[4000,6000,5000,0],[0,0,0,800]],dtype=float).T + + np.testing.assert_array_almost_equal(nu.generators_t.p.values,expected_dispatch) + + +def test_part_load_without_pyomo(): + """This test is based on +https://pypsa.org/examples/unit-commitment.html +and is not very comprehensive.""" + + nu = pypsa.Network() + + snapshots = range(4) + + nu.set_snapshots(snapshots) + + nu.add("Bus","bus") + + + nu.add("Generator","coal",bus="bus", + committable=True, + p_min_pu=0.3, + marginal_cost=20, + p_nom=10000) + + nu.add("Generator","gas",bus="bus", + committable=True, + marginal_cost=70, + p_min_pu=0.1, + p_nom=1000) + + nu.add("Load","load",bus="bus",p_set=[4000,6000,5000,800]) + + solver_name = "glpk" + + nu.lopf(nu.snapshots,solver_name=solver_name, pyomo=False) expected_status = np.array([[1,1,1,0],[0,0,0,1]],dtype=float).T @@ -138,5 +179,5 @@ def test_minimum_down_time(): if __name__ == "__main__": test_minimum_down_time() test_minimum_up_time() - test_part_load(pyomo=True) - test_part_load(pyomo=False) + test_part_load() + test_part_load_without_pyomo() From ceb5406b15b26c68b8206741f54298d21f684c04 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 26 Nov 2019 22:10:07 +0100 Subject: [PATCH 106/111] test: assert optimality --- test/test_ac_dc_lopf.py | 6 +++--- test/test_opf_storage.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index 7b5bfb7d7..a47f9c089 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -13,7 +13,6 @@ def test_lopf(): "ac-dc-meshed", "ac-dc-data") n = pypsa.Network(csv_folder_name) - n.links_t.p_set.drop(columns=n.links.index, inplace=True) results_folder_name = os.path.join(csv_folder_name,"results-lopf") @@ -37,8 +36,9 @@ def test_lopf(): n_r.links_t.p0.loc[:,n.links.index],decimal=4) if sys.version_info.major >= 3: - n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) - + status, cond = n.lopf(snapshots=snapshots, solver_name=solver_name, + pyomo=False) + assert status == 'optimal' equal(n.generators_t.p.loc[:,n.generators.index], n_r.generators_t.p.loc[:,n.generators.index],decimal=2) equal(n.lines_t.p0.loc[:,n.lines.index], diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 00b4d8932..04a4d24b2 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -30,8 +30,8 @@ def test_opf(): for solver_name in solvers: - n.lopf(solver_name=solver_name, pyomo=False) - + status, cond = n.lopf(solver_name=solver_name, pyomo=False) + assert status == 'optimal' equal(n.generators_t.p.reindex_like(target_gen_p), 
target_gen_p, decimal=2) From 91124737a155b27a5552d06ce49cdc4c76f30b53 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 27 Nov 2019 16:40:59 +0100 Subject: [PATCH 107/111] set status to 'ok' for optimal solutions --- pypsa/linopt.py | 6 +++++- test/test_ac_dc_lopf.py | 2 +- test/test_opf_storage.py | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 6f4d727a9..b6773ebbd 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -562,7 +562,7 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile, f.close() if data.startswith("Optimal - objective value"): - status = "optimal" + status = "ok" termination_condition = status objective = float(data[len("Optimal - objective value "):]) elif "Infeasible" in data: @@ -626,6 +626,8 @@ def read_until_break(f): if 'optimal' not in termination_condition: return status, termination_condition, None, None, None + else: + status = 'ok' duals = io.StringIO(''.join(read_until_break(f))[:-2]) duals = pd.read_fwf(duals)[1:].set_index('Row name') @@ -687,6 +689,8 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, termination_condition = status if termination_condition != "optimal": return status, termination_condition, None, None, None + else: + status = 'ok' variables_sol = pd.Series({v.VarName: v.x for v in m.getVars()}).pipe(set_int_index) diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index a47f9c089..7cfb831fa 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -38,7 +38,7 @@ def test_lopf(): if sys.version_info.major >= 3: status, cond = n.lopf(snapshots=snapshots, solver_name=solver_name, pyomo=False) - assert status == 'optimal' + assert status == 'ok' equal(n.generators_t.p.loc[:,n.generators.index], n_r.generators_t.p.loc[:,n.generators.index],decimal=2) equal(n.lines_t.p0.loc[:,n.lines.index], diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 04a4d24b2..320deb198 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -31,7 +31,7 @@ def test_opf(): for solver_name in solvers: status, cond = n.lopf(solver_name=solver_name, pyomo=False) - assert status == 'optimal' + assert status == 'ok' equal(n.generators_t.p.reindex_like(target_gen_p), target_gen_p, decimal=2) From b0f1d3a9d2d3844fbaa77b7cdcdf3f822e72c678 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Mon, 2 Dec 2019 18:07:27 +0100 Subject: [PATCH 108/111] linopt: Stop termination_condition being reset after gurobi solves --- pypsa/linopt.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index b6773ebbd..76ba4f28c 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -700,9 +700,7 @@ def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile, except AttributeError: logger.warning("Shadow prices of MILP couldn't be parsed") constraints_dual = pd.Series(index=[c.ConstrName for c in m.getConstrs()]) - termination_condition = status objective = m.ObjVal del m return (status, termination_condition, variables_sol, constraints_dual, objective) - From 7026e186e4040904cd36588d1ed525b31fb06ae1 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 11 Dec 2019 17:46:01 +0100 Subject: [PATCH 109/111] fix adding multiple global constraints --- pypsa/linopt.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index 76ba4f28c..a6a556ece 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -384,7 +384,9 @@ def _add_reference(ref_dict, df, attr, pnl=True): if 
ref_dict.df.empty: ref_dict.df[attr] = df else: - ref_dict.df.loc[df.index, attr] = df + ref_dict.df.loc[df.index, :] = df.to_frame(attr)\ + .reindex(columns=ref_dict.df.columns) +# ref_dict.df = pd.concat([ref_dict.df, df.to_frame(attr)]) def set_varref(n, variables, c, attr, spec=''): """ From 70ecac5e3a1e209e6b5cc40278f8d42cf8697301 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 16 Dec 2019 18:46:25 +0100 Subject: [PATCH 110/111] ensure carrier is defined in ilopf --- pypsa/linopf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pypsa/linopf.py b/pypsa/linopf.py index 593df321f..68dde7b3d 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -937,6 +937,7 @@ def ilopf(n, snapshots=None, msq_threshold=0.05, min_iterations=1, ''' + n.lines['carrier'] = n.lines.bus0.map(n.buses.carrier) ext_i = get_extendable_i(n, 'Line') typed_i = n.lines.query('type != ""').index ext_untyped_i = ext_i.difference(typed_i) From 8f443cd06fd79161dbec1e0254cb6dadf74b9750 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 16 Dec 2019 19:26:17 +0100 Subject: [PATCH 111/111] fix discussed error for multiple static global constraints --- pypsa/linopt.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pypsa/linopt.py b/pypsa/linopt.py index a6a556ece..fd871f799 100644 --- a/pypsa/linopt.py +++ b/pypsa/linopt.py @@ -384,9 +384,7 @@ def _add_reference(ref_dict, df, attr, pnl=True): if ref_dict.df.empty: ref_dict.df[attr] = df else: - ref_dict.df.loc[df.index, :] = df.to_frame(attr)\ - .reindex(columns=ref_dict.df.columns) -# ref_dict.df = pd.concat([ref_dict.df, df.to_frame(attr)]) + ref_dict.df = pd.concat([ref_dict.df, df.to_frame(attr)]) def set_varref(n, variables, c, attr, spec=''): """
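
Taken together, the series above leaves the pyomo-free workflow in the following shape. A short end-to-end sketch, assuming an example dataset and an installed GLPK solver (the network path is illustrative, not part of the patches):

    import pypsa
    from pypsa.linopt import get_sol, get_dual

    n = pypsa.Network('ac-dc-data')   # hypothetical example dataset
    status, condition = n.lopf(pyomo=False, solver_name='glpk',
                               keep_shadowprices=['Bus', 'Line'])
    assert status == 'ok'

    # solutions and duals with designated spots live on the components
    n.generators_t.p
    n.buses_t.marginal_price

    # everything else is reachable through the getters; n.solutions and
    # n.dualvalues list what was stored
    get_sol(n, 'Generator', 'p')
    get_dual(n, 'Line', 'mu_upper')
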