diff --git a/case-repository/pom.xml b/case-repository/pom.xml
index 5c03568f..6f556767 100644
--- a/case-repository/pom.xml
+++ b/case-repository/pom.xml
@@ -39,11 +39,6 @@
ucte-util
${project.version}
-            <dependency>
-                <groupId>junit</groupId>
-                <artifactId>junit</artifactId>
-                <version>4.12</version>
-            </dependency>
org.jboss.shrinkwrap
shrinkwrap-impl-nio2
diff --git a/modules/pom.xml b/modules/pom.xml
index f9a72cfd..c133b7d2 100644
--- a/modules/pom.xml
+++ b/modules/pom.xml
@@ -114,6 +114,17 @@
mockito-all
test
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>iidm-network-impl</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <scope>test</scope>
+        </dependency>
diff --git a/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifier.java b/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifier.java
new file mode 100644
index 00000000..226388a2
--- /dev/null
+++ b/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifier.java
@@ -0,0 +1,296 @@
+/**
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+package eu.itesla_project.modules.constraints;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.itesla_project.iidm.network.Line;
+import eu.itesla_project.iidm.network.Network;
+import eu.itesla_project.iidm.network.StateManager;
+import eu.itesla_project.iidm.network.TwoTerminalsConnectable;
+import eu.itesla_project.iidm.network.TwoWindingsTransformer;
+import eu.itesla_project.iidm.network.VoltageLevel;
+import eu.itesla_project.modules.security.LimitViolation;
+import eu.itesla_project.modules.security.LimitViolationFilter;
+import eu.itesla_project.modules.security.LimitViolationType;
+import eu.itesla_project.modules.security.Security;
+
+/**
+ *
+ * @author Quinary
+ */
+public class ConstraintsModifier {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConstraintsModifier.class);
+
+ private Network network;
+ private ConstraintsModifierConfig config;
+
+ public ConstraintsModifier(Network network) {
+ this(network, ConstraintsModifierConfig.load());
+ }
+
+ public ConstraintsModifier(Network network, ConstraintsModifierConfig config) {
+ LOGGER.info(config.toString());
+ this.network = network;
+ this.config = config;
+ }
+
+ public void looseConstraints(String stateId) {
+ looseConstraints(stateId, 0f, false);
+ }
+
+ public void looseConstraints(String stateId, float margin) {
+ looseConstraints(stateId, margin, false);
+ }
+
+ public void looseConstraints(String stateId, float margin, boolean applyToBaseCase) {
+ if ( network.getStateManager().getStateIds().contains(stateId) ) {
+ String workingStateId = network.getStateManager().getWorkingStateId();
+ network.getStateManager().setWorkingState(stateId);
+            List<LimitViolation> violations = Security.checkLimits(network);
+ looseConstraints(stateId, violations, margin, applyToBaseCase);
+ network.getStateManager().setWorkingState(workingStateId);
+ } else {
+ throw new RuntimeException("No "+stateId+" in network "+network.getId()+": cannot loose constraints");
+ }
+ }
+
+    public void looseConstraints(String stateId, List<LimitViolation> violations) {
+ looseConstraints(stateId, violations, 0f, false);
+ }
+
+    public void looseConstraints(String stateId, List<LimitViolation> violations, float margin) {
+ looseConstraints(stateId, violations, margin, false);
+ }
+
+    public void looseConstraints(String stateId, List<LimitViolation> violations, float margin, boolean applyToBaseCase) {
+ Objects.requireNonNull(stateId, "state id is null");
+ Objects.requireNonNull(violations, "violations is null");
+ if ( network.getStateManager().getStateIds().contains(stateId) ) {
+ String workingStateId = network.getStateManager().getWorkingStateId();
+ network.getStateManager().setWorkingState(stateId);
+ LOGGER.info("Loosening constraints of network {}, state {}, using margin {}",
+ network.getId(),
+ network.getStateManager().getWorkingStateId(),
+ margin);
+            LimitViolationFilter violationsFilter = new LimitViolationFilter(new HashSet<>(config.getViolationsTypes()), 0);
+            List<LimitViolation> filteredViolations = violationsFilter.apply(violations);
+            String report = Security.printLimitsViolations(violations, violationsFilter);
+ if (report != null) {
+ LOGGER.debug("Fixing constraints of network {}, state {}, causing the following {} violations:\n{}",
+ network.getId(),
+ network.getStateManager().getWorkingStateId(),
+ filteredViolations.size(),
+ report);
+ }
+ for (LimitViolation violation : filteredViolations) {
+ LOGGER.debug("Fixing the constraints causing the {} violation on equipment {}",
+ violation.getLimitType(),
+ violation.getSubject().getId());
+ switch (violation.getLimitType()) {
+ case CURRENT:
+ setNewCurrentLimit(stateId, violation, margin, applyToBaseCase);
+ break;
+ case HIGH_VOLTAGE:
+ setNewHighVoltageLimit(stateId, violation, margin, applyToBaseCase);
+ break;
+ case LOW_VOLTAGE:
+ setNewLowVoltageLimit(stateId, violation, margin, applyToBaseCase);
+ break;
+ }
+ }
+ network.getStateManager().setWorkingState(workingStateId);
+ } else {
+ throw new RuntimeException("No "+stateId+" in network "+network.getId()+": cannot loose constraints");
+ }
+ }
+
+ private void setNewCurrentLimit(String stateId, LimitViolation violation, float margin, boolean applyToBaseCase) {
+ TwoTerminalsConnectable violatedBranch = (TwoTerminalsConnectable) violation.getSubject();
+ // not sure if I need to reload the branch from the network ...
+ TwoTerminalsConnectable branch = null;
+ if ( violatedBranch instanceof Line )
+ branch = network.getLine(violatedBranch.getId());
+ else if ( violatedBranch instanceof TwoWindingsTransformer )
+ branch = network.getTwoWindingsTransformer(violatedBranch.getId());
+ if ( branch != null ) {
+ float newLimit = getNewUpperLimit(violation, margin);
+ if ( branch.getTerminal1().getI() == violation.getValue() ) {
+ LOGGER.debug("State {}: changing current limit 1 of branch {}: {} -> {}",
+ stateId,
+ branch.getId(),
+ violation.getLimit(),
+ newLimit);
+ branch.newCurrentLimits1().setPermanentLimit(newLimit).add();
+ if ( applyToBaseCase && !StateManager.INITIAL_STATE_ID.equals(stateId) ) { // change the limit also to basecase
+ String initialStateId = StateManager.INITIAL_STATE_ID;
+ network.getStateManager().setWorkingState(initialStateId);
+ if ( violatedBranch instanceof Line )
+ branch = network.getLine(violatedBranch.getId());
+ else if ( violatedBranch instanceof TwoWindingsTransformer )
+ branch = network.getTwoWindingsTransformer(violatedBranch.getId());
+ if ( branch != null ) {
+ LOGGER.debug("State {}: changing current limit 1 of branch {}: {} -> {}",
+ initialStateId,
+ branch.getId(),
+ violation.getLimit(),
+ newLimit);
+ branch.newCurrentLimits1().setPermanentLimit(newLimit).add();
+ } else {
+ LOGGER.warn("State {}: cannot change current limit of branch {}: no branch with this id in the network",
+ initialStateId,
+ violatedBranch.getId());
+ }
+ network.getStateManager().setWorkingState(stateId);
+ }
+ } else if ( branch.getTerminal2().getI() == violation.getValue() ) {
+ LOGGER.debug("State {}: changing current limit 2 of branch {}: {} -> {}",
+ stateId,
+ branch.getId(),
+ violation.getLimit(),
+ newLimit);
+ branch.newCurrentLimits2().setPermanentLimit(newLimit).add();
+ if ( applyToBaseCase && !StateManager.INITIAL_STATE_ID.equals(stateId) ) { // change the limit also to basecase
+ String initialStateId = StateManager.INITIAL_STATE_ID;
+ network.getStateManager().setWorkingState(initialStateId);
+ if ( violatedBranch instanceof Line )
+ branch = network.getLine(violatedBranch.getId());
+ else if ( violatedBranch instanceof TwoWindingsTransformer )
+ branch = network.getTwoWindingsTransformer(violatedBranch.getId());
+ if ( branch != null ) {
+ LOGGER.debug("State {}: changing current limit 2 of branch {}: {} -> {}",
+ initialStateId,
+ branch.getId(),
+ violation.getLimit(),
+ newLimit);
+ branch.newCurrentLimits2().setPermanentLimit(newLimit).add();
+ } else {
+ LOGGER.warn("State {}: cannot change current limit of branch {}: no branch with this id in the network",
+ initialStateId,
+ violatedBranch.getId());
+ }
+ network.getStateManager().setWorkingState(stateId);
+ }
+ }
+ } else {
+ LOGGER.warn("State {}: cannot change current limit of branch {}: no branch with this id in the network",
+ stateId,
+ violatedBranch.getId());
+ }
+ }
+
+ private void setNewHighVoltageLimit(String stateId, LimitViolation violation, float margin, boolean applyToBaseCase) {
+ VoltageLevel violatedVoltageLevel = (VoltageLevel) violation.getSubject();
+ VoltageLevel voltageLevel = network.getVoltageLevel(violatedVoltageLevel.getId());
+ if ( voltageLevel != null ) {
+ if ( violation.getValue() > voltageLevel.getHighVoltageLimit() ) { // it could already have been fixed
+ float newLimit = getNewUpperLimit(violation, margin);
+ LOGGER.debug("State {}: changing high voltage limit of voltage level {}: {} -> {}",
+ stateId,
+ voltageLevel.getId(),
+ violation.getLimit(),
+ newLimit);
+ voltageLevel.setHighVoltageLimit(newLimit);
+ if ( applyToBaseCase && !StateManager.INITIAL_STATE_ID.equals(stateId) ) { // change the limit also to basecase
+ String initialStateId = StateManager.INITIAL_STATE_ID;
+ network.getStateManager().setWorkingState(initialStateId);
+ voltageLevel = network.getVoltageLevel(violatedVoltageLevel.getId());
+ if ( voltageLevel != null ) {
+ LOGGER.debug("State {}: changing high voltage limit of voltage level {}: {} -> {}",
+ initialStateId,
+ voltageLevel.getId(),
+ violation.getLimit(),
+ newLimit);
+ voltageLevel.setHighVoltageLimit(newLimit);
+ } else {
+ LOGGER.warn("State {}: cannot change high voltage limit of voltage level {}: no voltage level with this id in the network",
+ initialStateId,
+ violatedVoltageLevel.getId());
+ }
+ network.getStateManager().setWorkingState(stateId);
+ }
+ }
+ } else {
+ LOGGER.warn("State {}: cannot change high voltage limit of voltage level {}: no voltage level with this id in the network",
+ stateId,
+ violatedVoltageLevel.getId());
+ }
+ }
+
+ private void setNewLowVoltageLimit(String stateId, LimitViolation violation, float margin, boolean applyToBaseCase) {
+ VoltageLevel violatedVoltageLevel = (VoltageLevel) violation.getSubject();
+ VoltageLevel voltageLevel = network.getVoltageLevel(violatedVoltageLevel.getId());
+ if ( voltageLevel != null ) {
+ if ( violation.getValue() < voltageLevel.getLowVoltageLimit() ) { // it could already have been fixed
+ float newLimit = getNewLowerLimit(violation, margin);
+ LOGGER.debug("State {}: changing low voltage limit of voltage level {}: {} -> {}",
+ stateId,
+ voltageLevel.getId(),
+ violation.getLimit(),
+ newLimit);
+ voltageLevel.setLowVoltageLimit(newLimit);
+ if ( applyToBaseCase && !StateManager.INITIAL_STATE_ID.equals(stateId) ) { // change the limit also to basecase
+ String initialStateId = StateManager.INITIAL_STATE_ID;
+ network.getStateManager().setWorkingState(initialStateId);
+ voltageLevel = network.getVoltageLevel(violatedVoltageLevel.getId());
+ if ( voltageLevel != null ) {
+ LOGGER.debug("State {}: changing low voltage limit of voltage level {}: {} -> {}",
+ initialStateId,
+ voltageLevel.getId(),
+ violation.getLimit(),
+ newLimit);
+ voltageLevel.setLowVoltageLimit(newLimit);
+ } else {
+                    LOGGER.warn("State {}: cannot change low voltage limit of voltage level {}: no voltage level with this id in the network",
+ initialStateId,
+ violatedVoltageLevel.getId());
+ }
+ network.getStateManager().setWorkingState(stateId);
+ }
+ }
+ } else {
+ LOGGER.warn("State {}: cannot change low voltage limit of voltage level {}: no voltage level with this id in the network",
+ stateId,
+ violatedVoltageLevel.getId());
+ }
+ }
+
+ private float getNewUpperLimit(LimitViolation violation, float margin) {
+ float newLimit = 9999;
+ if ( config.isInAreaOfInterest(violation, network) ) {
+ float increment = (float) ((violation.getLimit() == 0)
+ ? Math.ceil(violation.getValue()*100)
+ : Math.ceil((violation.getValue()-violation.getLimit())*100/violation.getLimit()));
+ increment += margin;
+ newLimit = (violation.getLimit() == 0)
+ ? (increment/100)
+ : (violation.getLimit()+(violation.getLimit()*increment/100));
+ }
+ return newLimit;
+ }
+
+ private float getNewLowerLimit(LimitViolation violation, float margin) {
+ float newLimit = -9999;
+ if ( config.isInAreaOfInterest(violation, network) ) {
+ float increment = (float) ((violation.getLimit() == 0)
+ ? Math.ceil(-violation.getValue()*100)
+ : Math.ceil((violation.getLimit()-violation.getValue())*100/violation.getLimit()));
+ increment += margin;
+ newLimit = (violation.getLimit() == 0)
+ ? (increment/100)
+ : (violation.getLimit()-(violation.getLimit()*increment/100));
+ }
+ return newLimit;
+ }
+
+}
diff --git a/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfig.java b/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfig.java
new file mode 100644
index 00000000..f49b670b
--- /dev/null
+++ b/modules/src/main/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfig.java
@@ -0,0 +1,74 @@
+/**
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+package eu.itesla_project.modules.constraints;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.itesla_project.commons.io.ModuleConfig;
+import eu.itesla_project.commons.io.PlatformConfig;
+import eu.itesla_project.iidm.network.Country;
+import eu.itesla_project.iidm.network.Network;
+import eu.itesla_project.modules.security.LimitViolation;
+import eu.itesla_project.modules.security.LimitViolationType;
+
+/**
+ *
+ * @author Quinary
+ */
+public class ConstraintsModifierConfig {
+
+    public static final List<LimitViolationType> DEFAULT_VIOLATION_TYPES = Arrays.asList(LimitViolationType.CURRENT);
+ public static final Country DEFAULT_COUNTRY = null;
+
+ private final Country country;
+    private final List<LimitViolationType> violationsTypes;
+
+ public static ConstraintsModifierConfig load() {
+ return load(PlatformConfig.defaultConfig());
+ }
+
+ public static ConstraintsModifierConfig load(PlatformConfig platformConfig) {
+        List<LimitViolationType> violationsTypes;
+ Country country;
+ if (platformConfig.moduleExists("constraintsModifier")) {
+ ModuleConfig config = platformConfig.getModuleConfig("constraintsModifier");
+ violationsTypes = config.getEnumListProperty("violationsTypes", LimitViolationType.class, DEFAULT_VIOLATION_TYPES);
+ String countryStr = config.getStringProperty("country", null);
+ country = ( countryStr == null ) ? DEFAULT_COUNTRY : Country.valueOf(countryStr);
+ } else {
+ violationsTypes = DEFAULT_VIOLATION_TYPES;
+ country = DEFAULT_COUNTRY;
+ }
+ return new ConstraintsModifierConfig(country, violationsTypes);
+ }
+
+    public ConstraintsModifierConfig(Country country, List<LimitViolationType> violationsTypes) {
+ this.country = country;
+ this.violationsTypes = violationsTypes;
+ }
+
+ public Country getCountry() {
+ return country;
+ }
+
+    public List<LimitViolationType> getViolationsTypes() {
+ return violationsTypes;
+ }
+
+ public boolean isInAreaOfInterest(LimitViolation violation, Network network) {
+ if ( country != null )
+ return violation.getCountry() == country;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "ConstraintsModifierConfig[country="+country+",violation types="+violationsTypes.toString()+"]";
+ }
+
+}
diff --git a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java
index e2cab614..823a6e0b 100644
--- a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java
+++ b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java
@@ -1,5 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -28,11 +29,11 @@
* @author Quinary
*/
public class OnlineWorkflowParameters implements Serializable {
-
- /*
- * example of online-default-parameters.properties file.
- *
-
+
+ /*
+ * example of online-default-parameters.properties file.
+ *
+
# basecase to be analyzed
baseCaseDate=2013-01-15T18:30:00+01:00
# number of states
@@ -56,13 +57,15 @@ public class OnlineWorkflowParameters implements Serializable {
# list of security indexes to be used by the workflow, leave empty to use all the available ones
securityIndexes=SMALLSIGNAL,TSO_SYNCHROLOSS,TSO_GENERATOR_VOLTAGE_AUTOMATON
- */
+ */
private static final long serialVersionUID = 1L;
public static final CaseType DEFAULT_CASE_TYPE = CaseType.FO;
public static final List DEFAULT_COUNTRIES = Arrays.asList(Country.FR);
public static final boolean DAFAULT_MERGE_OPTIMIZED = false;
public static final float DEFAULT_LIMIT_REDUCTION = 1f;
+ public static final boolean DAFAULT_HANDLE_VIOLATIONS_IN_N = false;
+ public static final float DEFAULT_CONSTRAINT_MARGIN = 0f;
private DateTime baseCaseDate;
private int states;
@@ -71,7 +74,7 @@ public class OnlineWorkflowParameters implements Serializable {
private TimeHorizon timeHorizon;
private String feAnalysisId;
private double rulesPurityThreshold;
- private boolean storeStates;
+ private boolean storeStates;
private boolean analyseBasecase;
private boolean validation;
private Set securityIndexes;
@@ -79,8 +82,10 @@ public class OnlineWorkflowParameters implements Serializable {
private Set countries;
private boolean mergeOptimized;
private float limitReduction;
+ private boolean handleViolationsInN;
+ private float constraintMargin;
- public static OnlineWorkflowParameters loadDefault() {
+ public static OnlineWorkflowParameters loadDefault() {
ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters");
DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate"));
int states = config.getIntProperty("states");
@@ -93,38 +98,43 @@ public static OnlineWorkflowParameters loadDefault() {
boolean analyseBasecase = config.getBooleanProperty("analyseBasecase", true);
boolean validation = config.getBooleanProperty("validation", false);
Set securityIndexes = null;
- String securityIndexesAsString = config.getStringProperty("securityIndexes", "");
- if ( !"".equals(securityIndexesAsString) ) {
- securityIndexes = Arrays.stream(securityIndexesAsString.split(","))
+ String securityIndexesAsString = config.getStringProperty("securityIndexes", "");
+ if ( !"".equals(securityIndexesAsString) ) {
+ securityIndexes = Arrays.stream(securityIndexesAsString.split(","))
.map(SecurityIndexType::valueOf)
.collect(Collectors.toSet());
- }
- CaseType caseType = config.getEnumProperty("caseType", CaseType.class, DEFAULT_CASE_TYPE);
- Set countries = new HashSet<>(config.getEnumListProperty("countries", Country.class, DEFAULT_COUNTRIES));
- boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DAFAULT_MERGE_OPTIMIZED);
- float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION);
-
+ }
+ CaseType caseType = config.getEnumProperty("caseType", CaseType.class, DEFAULT_CASE_TYPE);
+ Set countries = new HashSet<>(config.getEnumListProperty("countries", Country.class, DEFAULT_COUNTRIES));
+ boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DAFAULT_MERGE_OPTIMIZED);
+ float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION);
+ boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DAFAULT_HANDLE_VIOLATIONS_IN_N);
+ float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN);
+
return new OnlineWorkflowParameters(baseCaseDate,
- states,
- histoInterval,
- offlineWorkflowId,
- timeHorizon,
- feAnalysisId,
- rulesPurityThreshold,
- storeStates,
- analyseBasecase,
- validation,
- securityIndexes,
- caseType,
- countries,
- mergeOptimized,
- limitReduction
- );
+ states,
+ histoInterval,
+ offlineWorkflowId,
+ timeHorizon,
+ feAnalysisId,
+ rulesPurityThreshold,
+ storeStates,
+ analyseBasecase,
+ validation,
+ securityIndexes,
+ caseType,
+ countries,
+ mergeOptimized,
+ limitReduction,
+ handleViolationsInN,
+ constraintMargin
+ );
}
public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon,
- String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation,
- Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, float limitReduction) {
+ String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation,
+ Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized,
+ float limitReduction, boolean handleViolationsInN, float constraintMargin) {
Objects.requireNonNull(baseCaseDate);
Objects.requireNonNull(histoInterval);
this.baseCaseDate = baseCaseDate;
@@ -142,6 +152,8 @@ public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval hist
this.countries = countries;
this.mergeOptimized = mergeOptimized;
this.limitReduction = limitReduction;
+ this.handleViolationsInN = handleViolationsInN;
+ this.constraintMargin = constraintMargin;
}
public DateTime getBaseCaseDate() {
@@ -153,56 +165,64 @@ public Interval getHistoInterval() {
}
public int getStates() {
- return states;
+ return states;
}
public String getOfflineWorkflowId() {
- return offlineWorkflowId;
+ return offlineWorkflowId;
}
public TimeHorizon getTimeHorizon() {
- return timeHorizon;
+ return timeHorizon;
}
-
+
public String getFeAnalysisId() {
- return feAnalysisId;
+ return feAnalysisId;
}
-
+
public double getRulesPurityThreshold() {
- return rulesPurityThreshold;
- }
+ return rulesPurityThreshold;
+ }
public boolean storeStates() {
- return storeStates;
+ return storeStates;
}
-
+
public boolean analyseBasecase() {
- return analyseBasecase;
+ return analyseBasecase;
}
-
+
public boolean validation() {
- return validation;
+ return validation;
}
-
+
public Set getSecurityIndexes() {
- return securityIndexes;
- }
-
+ return securityIndexes;
+ }
+
public CaseType getCaseType() {
- return caseType;
- }
-
+ return caseType;
+ }
+
public Set getCountries() {
- return countries;
- }
-
- public boolean isMergeOptimized() {
- return mergeOptimized;
- }
-
- public float getLimitReduction() {
- return limitReduction;
- }
+ return countries;
+ }
+
+ public boolean isMergeOptimized() {
+ return mergeOptimized;
+ }
+
+ public float getLimitReduction() {
+ return limitReduction;
+ }
+
+ public boolean isHandleViolationsInN() {
+ return handleViolationsInN;
+ }
+
+ public float getConstraintMargin() {
+ return constraintMargin;
+ }
@Override
public String toString() {
@@ -221,67 +241,77 @@ public String toString() {
+ ", countries=" + countries
+ ", mergeOptimized=" + mergeOptimized
+ ", limitReduction=" + limitReduction
+ + ", handleViolationsInN=" + handleViolationsInN
+ + ", constraintMargin=" + constraintMargin
+ "}";
}
- public void setStates(int states) {
- this.states = states;
- }
-
- public void setBaseCaseDate(DateTime baseCaseDate) {
- this.baseCaseDate = baseCaseDate;
- }
-
- public void setHistoInterval(Interval histoInterval) {
- this.histoInterval = histoInterval;
- }
-
- public void setOfflineWorkflowId(String offlineWorkflowId) {
- this.offlineWorkflowId = offlineWorkflowId;
- }
-
- public void setTimeHorizon(TimeHorizon timeHorizon) {
- this.timeHorizon = timeHorizon;
- }
-
- public void setFeAnalysisId(String feAnalysisId) {
- this.feAnalysisId = feAnalysisId;
- }
-
- public void setRulesPurityThreshold(double rulesPurityThreshold) {
- this.rulesPurityThreshold = rulesPurityThreshold;
- }
-
- public void setStoreStates(boolean storeStates) {
- this.storeStates = storeStates;
- }
-
- public void setAnalyseBasecase(boolean analyseBasecase) {
- this.analyseBasecase = analyseBasecase;
- }
-
- public void setValidation(boolean validation) {
- this.validation = validation;
- }
-
- public void setSecurityIndexes(Set securityIndexes) {
- this.securityIndexes = securityIndexes;
- }
-
- public void setCaseType(CaseType caseType) {
- this.caseType = caseType;
- }
-
- public void setCountries(Set countries) {
- this.countries = countries;
- }
-
- public void setMergeOptimized(boolean mergeOptimized) {
- this.mergeOptimized = mergeOptimized;
- }
-
- public void setLimitReduction(float limitReduction) {
- this.limitReduction = limitReduction;
- }
-
+ public void setStates(int states) {
+ this.states = states;
+ }
+
+ public void setBaseCaseDate(DateTime baseCaseDate) {
+ this.baseCaseDate = baseCaseDate;
+ }
+
+ public void setHistoInterval(Interval histoInterval) {
+ this.histoInterval = histoInterval;
+ }
+
+ public void setOfflineWorkflowId(String offlineWorkflowId) {
+ this.offlineWorkflowId = offlineWorkflowId;
+ }
+
+ public void setTimeHorizon(TimeHorizon timeHorizon) {
+ this.timeHorizon = timeHorizon;
+ }
+
+ public void setFeAnalysisId(String feAnalysisId) {
+ this.feAnalysisId = feAnalysisId;
+ }
+
+ public void setRulesPurityThreshold(double rulesPurityThreshold) {
+ this.rulesPurityThreshold = rulesPurityThreshold;
+ }
+
+ public void setStoreStates(boolean storeStates) {
+ this.storeStates = storeStates;
+ }
+
+ public void setAnalyseBasecase(boolean analyseBasecase) {
+ this.analyseBasecase = analyseBasecase;
+ }
+
+ public void setValidation(boolean validation) {
+ this.validation = validation;
+ }
+
+ public void setSecurityIndexes(Set securityIndexes) {
+ this.securityIndexes = securityIndexes;
+ }
+
+ public void setCaseType(CaseType caseType) {
+ this.caseType = caseType;
+ }
+
+ public void setCountries(Set countries) {
+ this.countries = countries;
+ }
+
+ public void setMergeOptimized(boolean mergeOptimized) {
+ this.mergeOptimized = mergeOptimized;
+ }
+
+ public void setLimitReduction(float limitReduction) {
+ this.limitReduction = limitReduction;
+ }
+
+ public void setHandleViolationsInN(boolean handleViolationsInN) {
+ this.handleViolationsInN = handleViolationsInN;
+ }
+
+ public void setConstraintMargin(float constraintMargin) {
+ this.constraintMargin = constraintMargin;
+ }
+
}
diff --git a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowRulesResults.java b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowRulesResults.java
index 1830c18b..3976eea7 100644
--- a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowRulesResults.java
+++ b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowRulesResults.java
@@ -1,5 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -10,6 +11,8 @@
import java.util.List;
import java.util.Map;
+import eu.itesla_project.modules.securityindexes.SecurityIndexType;
+
/**
* The results of the security rules application during a run of the online workflow
*
@@ -17,46 +20,62 @@
* @author Quinary
*/
public interface OnlineWorkflowRulesResults {
-
- /**
- * Get the id of the workflow where the rules have been applied
- * @return the id of the workflow
- */
- String getWorkflowId();
-
- /**
- * Get the time horizon used for the workflow where the rules have been applied
- * @return the time horizon of the workflow
- */
- TimeHorizon getTimeHorizon();
-
- /**
- * Get the contingencies analyzed by the workflow with security rules
- * @return the collection of ids of the contingencies analyzed with security rules
- */
- Collection getContingenciesWithSecurityRulesResults();
-
- /**
- * Get the states analyzed by the workflow with security rules for a specific contingency
- * @param contingencyId the id of the contingency
- * @return the list of ids of the states analyzed with security rules
- */
- List getStatesWithSecurityRulesResults(String contingencyId);
-
- /**
- * Get the status (SAFE, SAFE_WITH_CORRECTIVE_ACTIONS, UNSAFE) of a state for a specific contingency
- * @param contingencyId the id of the contingency
- * @param stateId the id of the state
- * @return the status (SAFE, SAFE_WITH_CORRECTIVE_ACTIONS, UNSAFE) of the state
- */
- StateStatus getStateStatus(String contingencyId, Integer stateId);
-
- /**
- * Get the results of the application of the security rules to a state for a contingency
- * @param contingencyId the id of the contingency
- * @param stateId the id of the state
- * @return the map of [index, security flag] pair, output of the application of the security rules on a state for a contingency
- */
- Map getStateResults(String contingencyId, Integer stateId);
+
+ /**
+ * Get the id of the workflow where the rules have been applied
+ * @return the id of the workflow
+ */
+ String getWorkflowId();
+
+ /**
+ * Get the time horizon used for the workflow where the rules have been applied
+ * @return the time horizon of the workflow
+ */
+ TimeHorizon getTimeHorizon();
+
+ /**
+ * Get the contingencies analyzed by the workflow with security rules
+ * @return the collection of ids of the contingencies analyzed with security rules
+ */
+ Collection getContingenciesWithSecurityRulesResults();
+
+ /**
+ * Get the states analyzed by the workflow with security rules for a specific contingency
+ * @param contingencyId the id of the contingency
+ * @return the list of ids of the states analyzed with security rules
+ */
+ List getStatesWithSecurityRulesResults(String contingencyId);
+
+ /**
+ * Get the status (SAFE, SAFE_WITH_CORRECTIVE_ACTIONS, UNSAFE) of a state for a specific contingency
+ * @param contingencyId the id of the contingency
+ * @param stateId the id of the state
+ * @return the status (SAFE, SAFE_WITH_CORRECTIVE_ACTIONS, UNSAFE) of the state
+ */
+ StateStatus getStateStatus(String contingencyId, Integer stateId);
+
+ /**
+ * Get the results of the application of the security rules to a state for a contingency
+ * @param contingencyId the id of the contingency
+ * @param stateId the id of the state
+ * @return the map of [index, security flag] pair, output of the application of the security rules on a state for a contingency
+ */
+ Map getStateResults(String contingencyId, Integer stateId);
+
+ /**
+ * Return if there are available rules for a state and a contingency
+ * @param contingencyId the id of the contingency
+ * @param stateId the id of the state
+ * @return true if there are available rules for a state and a contingency, false otherwise
+ */
+ boolean areValidRulesAvailable(String contingencyId, Integer stateId);
+
+ /**
+ * Get the list of invalid rules types (phenomena), for a state and a contingency
+ * @param contingencyId the id of the contingency
+ * @param stateId the id of the state
+ * @return the list of invalid rules types (phenomena), for a state and a contingency
+ */
+    List<SecurityIndexType> getInvalidRules(String contingencyId, Integer stateId);
}
diff --git a/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeParameters.java b/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeParameters.java
index c79f31ca..ab661689 100644
--- a/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeParameters.java
+++ b/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeParameters.java
@@ -20,37 +20,44 @@
public class RulesFacadeParameters {
private final String offlineWorkflowId;
- private final List contingencies;
- private final double purityThreshold;
- private final Set securityIndexTypes; // all security index types if null
- private final boolean wcaRules;
-
- public RulesFacadeParameters(String offlineWorkflowId, List contingencies, double purityThreshold,
- Set securityIndexTypes, boolean wcaRules) {
+ private final List contingencies;
+ private final double purityThreshold;
+ private final Set securityIndexTypes; // all security index types if null
+ private final boolean wcaRules;
+ private final boolean checkRules;
+
+ public RulesFacadeParameters(String offlineWorkflowId, List contingencies, double purityThreshold,
+ Set securityIndexTypes, boolean wcaRules, boolean checkRules) {
this.offlineWorkflowId = offlineWorkflowId;
- this.contingencies = contingencies;
- this.purityThreshold = purityThreshold;
- this.securityIndexTypes = securityIndexTypes;
- this.wcaRules = wcaRules;
- }
+ this.contingencies = contingencies;
+ this.purityThreshold = purityThreshold;
+ this.securityIndexTypes = securityIndexTypes;
+ this.wcaRules = wcaRules;
+ this.checkRules = checkRules;
+ }
public String getOfflineWorkflowId() {
return offlineWorkflowId;
}
- public List getContingencies() {
- return contingencies;
- }
-
- public double getPurityThreshold() {
- return purityThreshold;
- }
-
- public Set getSecurityIndexTypes() {
- return securityIndexTypes;
- }
-
- public boolean wcaRules() {
- return wcaRules;
- }
+ public List getContingencies() {
+ return contingencies;
+ }
+
+ public double getPurityThreshold() {
+ return purityThreshold;
+ }
+
+ public Set getSecurityIndexTypes() {
+ return securityIndexTypes;
+ }
+
+ public boolean wcaRules() {
+ return wcaRules;
+ }
+
+ public boolean isCheckRules() {
+ return checkRules;
+ }
+
}
diff --git a/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeResults.java b/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeResults.java
index 686fe937..c272a6b9 100644
--- a/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeResults.java
+++ b/modules/src/main/java/eu/itesla_project/modules/online/RulesFacadeResults.java
@@ -7,42 +7,56 @@
*/
package eu.itesla_project.modules.online;
-import eu.itesla_project.modules.securityindexes.SecurityIndexType;
-
+import java.util.List;
import java.util.Map;
+import eu.itesla_project.modules.securityindexes.SecurityIndexType;
+
/**
*
* @author Quinary
*/
public class RulesFacadeResults {
- private final String stateId;
- private final String contingencyId;
- private final StateStatus stateStatus;
- private final Map indexesResults;
-
- public RulesFacadeResults(String stateId, String contingencyId, StateStatus stateStatus, Map indexesResults) {
- this.stateId = stateId;
- this.contingencyId = contingencyId;
- this.stateStatus = stateStatus;
- this.indexesResults = indexesResults;
- }
-
- public String getStateId() {
- return stateId;
- }
-
- public String getContingencyId() {
- return contingencyId;
+ private final String stateId;
+ private final String contingencyId;
+ private final StateStatus stateStatus;
+ private final Map indexesResults;
+ private final List invalidRules;
+ private final boolean rulesAvailable;
+
+ public RulesFacadeResults(String stateId, String contingencyId, StateStatus stateStatus, Map indexesResults,
+ List invalidRules, boolean rulesAvailable) {
+ this.stateId = stateId;
+ this.contingencyId = contingencyId;
+ this.stateStatus = stateStatus;
+ this.indexesResults = indexesResults;
+ this.invalidRules = invalidRules;
+ this.rulesAvailable = rulesAvailable;
+ }
+
+ public String getStateId() {
+ return stateId;
+ }
+
+ public String getContingencyId() {
+ return contingencyId;
+ }
+
+ public StateStatus getStateStatus() {
+ return stateStatus;
+ }
+
+ public Map getIndexesResults() {
+ return indexesResults;
+ }
+
+ public List getInvalidRules() {
+ return invalidRules;
}
-
- public StateStatus getStateStatus() {
- return stateStatus;
- }
-
- public Map getIndexesResults() {
- return indexesResults;
+
+ public boolean areRulesAvailable() {
+ return rulesAvailable;
}
}
diff --git a/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfigTest.java b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfigTest.java
new file mode 100644
index 00000000..f1ec2373
--- /dev/null
+++ b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierConfigTest.java
@@ -0,0 +1,81 @@
+/**
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+package eu.itesla_project.modules.constraints;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.file.FileSystem;
+import java.util.Arrays;
+import java.util.List;
+
+import org.jboss.shrinkwrap.api.ShrinkWrap;
+import org.jboss.shrinkwrap.api.nio.file.ShrinkWrapFileSystems;
+import org.jboss.shrinkwrap.api.spec.JavaArchive;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import eu.itesla_project.commons.io.InMemoryPlatformConfig;
+import eu.itesla_project.commons.io.MapModuleConfig;
+import eu.itesla_project.iidm.network.Country;
+import eu.itesla_project.iidm.network.Network;
+import eu.itesla_project.modules.security.LimitViolation;
+import eu.itesla_project.modules.security.LimitViolationType;
+
+/**
+ *
+ * @author Quinary
+ */
+public class ConstraintsModifierConfigTest {
+
+ private FileSystem fileSystem;
+ private InMemoryPlatformConfig platformConfig;
+ private Network network;
+ private List violations;
+
+ @Before
+ public void setUp() throws Exception {
+ JavaArchive archive = ShrinkWrap.create(JavaArchive.class);
+ fileSystem = ShrinkWrapFileSystems.newFileSystem(archive);
+ platformConfig = new InMemoryPlatformConfig(fileSystem);
+ network = ConstraintsModifierTestUtils.getNetwork();
+ violations = ConstraintsModifierTestUtils.getViolations(network);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ fileSystem.close();
+ }
+
+ @Test
+ public void testNoConfig() throws Exception {
+ ConstraintsModifierConfig config = ConstraintsModifierConfig.load(platformConfig);
+ checkValues(config, ConstraintsModifierConfig.DEFAULT_COUNTRY, ConstraintsModifierConfig.DEFAULT_VIOLATION_TYPES);
+ }
+
+ @Test
+ public void testLoadConfig() throws Exception {
+ Country country = Country.FR;
+ LimitViolationType violationType = LimitViolationType.CURRENT;
+ MapModuleConfig moduleConfig = platformConfig.createModuleConfig("constraintsModifier");
+ moduleConfig.setStringListProperty("country", Arrays.asList(country.name()));
+ moduleConfig.setStringListProperty("violationsTypes", Arrays.asList(violationType.name()));
+ ConstraintsModifierConfig config = ConstraintsModifierConfig.load(platformConfig);
+ checkValues(config, country, Arrays.asList(violationType));
+ }
+
+ private void checkValues(ConstraintsModifierConfig config, Country expectedCountry, List expectedViolationTypes) {
+ assertEquals(expectedCountry, config.getCountry());
+ assertArrayEquals(expectedViolationTypes.toArray(), config.getViolationsTypes().toArray());
+ for(LimitViolation violation : violations) {
+ assertTrue(config.isInAreaOfInterest(violation, network));
+ }
+ }
+
+}
diff --git a/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTest.java b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTest.java
new file mode 100644
index 00000000..6e2df6d1
--- /dev/null
+++ b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTest.java
@@ -0,0 +1,180 @@
+/**
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+package eu.itesla_project.modules.constraints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import eu.itesla_project.iidm.network.Line;
+import eu.itesla_project.iidm.network.Network;
+import eu.itesla_project.iidm.network.StateManager;
+import eu.itesla_project.iidm.network.VoltageLevel;
+import eu.itesla_project.modules.security.LimitViolation;
+import eu.itesla_project.modules.security.LimitViolationType;
+
+/**
+ *
+ * @author Quinary
+ */
+public class ConstraintsModifierTest {
+
+ private Network network;
+ private List violations;
+ private ConstraintsModifierConfig config;
+
+ @Before
+ public void setUp() throws Exception {
+ network = ConstraintsModifierTestUtils.getNetwork();
+ violations = ConstraintsModifierTestUtils.getViolations(network);
+ List violationTypes = Arrays.asList(LimitViolationType.CURRENT,
+ LimitViolationType.HIGH_VOLTAGE,
+ LimitViolationType.LOW_VOLTAGE);
+ config = new ConstraintsModifierConfig(ConstraintsModifierConfig.DEFAULT_COUNTRY, violationTypes);
+ }
+
+ private void checkOriginalNetworkLimits() {
+ Line line = network.getLine(ConstraintsModifierTestUtils.LINE_ID);
+ assertEquals(line.getCurrentLimits1().getPermanentLimit(), ConstraintsModifierTestUtils.CURRENT_LIMIT, 0);
+ VoltageLevel voltageLevel1 = network.getVoltageLevel(ConstraintsModifierTestUtils.VOLTAGE_LEVEL_1_ID);
+ assertEquals(voltageLevel1.getHighVoltageLimit(), ConstraintsModifierTestUtils.HIGH_VOLTAGE_LIMIT, 0);
+ VoltageLevel voltageLevel2 = network.getVoltageLevel(ConstraintsModifierTestUtils.VOLTAGE_LEVEL_2_ID);
+ assertEquals(voltageLevel2.getLowVoltageLimit(), ConstraintsModifierTestUtils.LOW_VOLTAGE_LIMIT, 0);
+ }
+
+ private void checkModifiedNetworkLimits(int margin) {
+ Line line = network.getLine(ConstraintsModifierTestUtils.LINE_ID);
+ float newCurrentLimit = ConstraintsModifierTestUtils.NEW_CURRENT_LIMIT + (ConstraintsModifierTestUtils.CURRENT_LIMIT * margin / 100);
+ assertEquals(newCurrentLimit, line.getCurrentLimits1().getPermanentLimit(), 0);
+ VoltageLevel voltageLevel1 = network.getVoltageLevel(ConstraintsModifierTestUtils.VOLTAGE_LEVEL_1_ID);
+ float newHighVoltageLimit = ConstraintsModifierTestUtils.NEW_HIGH_VOLTAGE_LIMIT + (ConstraintsModifierTestUtils.HIGH_VOLTAGE_LIMIT * margin / 100);
+ assertEquals(newHighVoltageLimit, voltageLevel1.getHighVoltageLimit(), 0);
+ VoltageLevel voltageLevel2 = network.getVoltageLevel(ConstraintsModifierTestUtils.VOLTAGE_LEVEL_2_ID);
+ float newLowVoltageLimit = ConstraintsModifierTestUtils.NEW_LOW_VOLTAGE_LIMIT - (ConstraintsModifierTestUtils.LOW_VOLTAGE_LIMIT * margin / 100);
+ assertEquals(newLowVoltageLimit, voltageLevel2.getLowVoltageLimit(), 0);
+ }
+
+ @Test
+ public void testNoMargin() throws Exception {
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(StateManager.INITIAL_STATE_ID);
+
+ checkModifiedNetworkLimits(0);
+ }
+
+ @Test
+ public void testWithMargin() throws Exception {
+ int margin = 3;
+
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(StateManager.INITIAL_STATE_ID, margin);
+
+ checkModifiedNetworkLimits(margin);
+ }
+
+ @Test
+ public void testWithViolationsNoMargin() throws Exception {
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(StateManager.INITIAL_STATE_ID, violations);
+
+ checkModifiedNetworkLimits(0);
+ }
+
+ @Test
+ public void testWithViolationsAndMargin() throws Exception {
+ int margin = 3;
+
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(StateManager.INITIAL_STATE_ID, violations, margin);
+
+ checkModifiedNetworkLimits(margin);
+ }
+
+ @Test
+ public void testWithViolationsAndMarginApplyBasecase() throws Exception {
+ int margin = 3;
+
+ checkOriginalNetworkLimits();
+
+ String stateId = "0";
+ network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, stateId);
+ network.getStateManager().setWorkingState(stateId);
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(stateId, violations, margin, true);
+
+ checkModifiedNetworkLimits(margin);
+
+ network.getStateManager().setWorkingState(StateManager.INITIAL_STATE_ID);
+ checkModifiedNetworkLimits(margin);
+
+ network.getStateManager().removeState(stateId);
+ }
+
+ @Test
+ public void testWithMarginApplyBasecase() throws Exception {
+ int margin = 3;
+
+ checkOriginalNetworkLimits();
+
+ String stateId = "0";
+ network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, stateId);
+ network.getStateManager().setWorkingState(stateId);
+ checkOriginalNetworkLimits();
+
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ constraintsModifier.looseConstraints(stateId, margin, true);
+
+ checkModifiedNetworkLimits(margin);
+
+ network.getStateManager().setWorkingState(StateManager.INITIAL_STATE_ID);
+ checkModifiedNetworkLimits(margin);
+
+ network.getStateManager().removeState(stateId);
+ }
+
+ @Test
+ public void testWithNullValues() throws Exception {
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ try {
+ constraintsModifier.looseConstraints(null, violations);
+ fail();
+ } catch (Throwable e) {
+
+ }
+ try {
+ constraintsModifier.looseConstraints(StateManager.INITIAL_STATE_ID, null);
+ fail();
+ } catch (Throwable e) {
+ }
+ }
+
+ @Test
+ public void testWithWrongState() throws Exception {
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(network, config);
+ try {
+ constraintsModifier.looseConstraints("wrongState", violations);
+ fail();
+ } catch (Throwable e) {
+ }
+ }
+
+}
diff --git a/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTestUtils.java b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTestUtils.java
new file mode 100644
index 00000000..3dbb3bbc
--- /dev/null
+++ b/modules/src/test/java/eu/itesla_project/modules/constraints/ConstraintsModifierTestUtils.java
@@ -0,0 +1,115 @@
+/**
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+package eu.itesla_project.modules.constraints;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import eu.itesla_project.iidm.network.Bus;
+import eu.itesla_project.iidm.network.Country;
+import eu.itesla_project.iidm.network.Line;
+import eu.itesla_project.iidm.network.Network;
+import eu.itesla_project.iidm.network.NetworkFactory;
+import eu.itesla_project.iidm.network.Substation;
+import eu.itesla_project.iidm.network.TopologyKind;
+import eu.itesla_project.iidm.network.VoltageLevel;
+import eu.itesla_project.modules.security.LimitViolation;
+import eu.itesla_project.modules.security.LimitViolationType;
+
+/**
+ *
+ * @author Quinary
+ */
+public class ConstraintsModifierTestUtils {
+
+ public static final String VOLTAGE_LEVEL_1_ID = "vl1";
+ public static final float HIGH_VOLTAGE_LIMIT = 300f;
+ public static final float NEW_HIGH_VOLTAGE_LIMIT = 381f;
+ public static final String VOLTAGE_LEVEL_2_ID = "vl2";
+ public static final float LOW_VOLTAGE_LIMIT = 420f;
+ public static final float NEW_LOW_VOLTAGE_LIMIT = 378f;
+ public static final String LINE_ID = "line1";
+ public static final float CURRENT_LIMIT = 100f;
+ public static final float NEW_CURRENT_LIMIT = 120f;
+ private static final float CURRENT_VALUE = 119.25632f;
+ private static final float V = 380f;
+ private static final float Q = 55f;
+ private static final float P = 56f;
+ private static final Country COUNTRY = Country.FR;
+
+ public static Network getNetwork() {
+ Network n = NetworkFactory.create("test1", "test");
+ Substation s1 = n.newSubstation()
+ .setId("s1")
+ .setCountry(COUNTRY)
+ .add();
+ VoltageLevel vl1 = s1.newVoltageLevel()
+ .setId(VOLTAGE_LEVEL_1_ID)
+ .setNominalV(V)
+ .setTopologyKind(TopologyKind.BUS_BREAKER)
+ .setHighVoltageLimit(HIGH_VOLTAGE_LIMIT)
+ .setLowVoltageLimit(200f)
+ .add();
+ Bus b1 = vl1.getBusBreakerView().newBus()
+ .setId("b1")
+ .add();
+ b1.setV(V);
+ Substation s2 = n.newSubstation()
+ .setId("s2")
+ .setCountry(COUNTRY)
+ .add();
+ VoltageLevel vl2 = s2.newVoltageLevel()
+ .setId(VOLTAGE_LEVEL_2_ID)
+ .setNominalV(V)
+ .setTopologyKind(TopologyKind.BUS_BREAKER)
+ .setHighVoltageLimit(550f)
+ .setLowVoltageLimit(LOW_VOLTAGE_LIMIT)
+ .add();
+ Bus b2 = vl2.getBusBreakerView().newBus()
+ .setId("b2")
+ .add();
+ b2.setV(V);
+ Line l1 = n.newLine()
+ .setId(LINE_ID)
+ .setVoltageLevel1(VOLTAGE_LEVEL_1_ID)
+ .setBus1("b1")
+ .setConnectableBus1("b1")
+ .setVoltageLevel2(VOLTAGE_LEVEL_2_ID)
+ .setBus2("b2")
+ .setConnectableBus2("b2")
+ .setR(3)
+ .setX(33)
+ .setG1(0)
+ .setB1(386E-6f / 2)
+ .setG2(0f)
+ .setB2(386E-6f / 2)
+ .add();
+ l1.newCurrentLimits1()
+ .setPermanentLimit(CURRENT_LIMIT)
+ .add();
+ l1.newCurrentLimits2()
+ .setPermanentLimit(CURRENT_LIMIT)
+ .add();
+ l1.getTerminal1().setP(P);
+ l1.getTerminal1().setQ(Q);
+ l1.getTerminal2().setP(P);
+ l1.getTerminal2().setQ(Q);
+ return n;
+ }
+
+ public static List getViolations(Network network) {
+ List violations = new ArrayList();
+ Line line = network.getLine(LINE_ID);
+ violations.add(new LimitViolation(line, LimitViolationType.CURRENT, CURRENT_LIMIT, 1, CURRENT_VALUE, COUNTRY, Float.NaN));
+ VoltageLevel voltageLevel = network.getVoltageLevel(VOLTAGE_LEVEL_1_ID);
+ violations.add(new LimitViolation(voltageLevel, LimitViolationType.HIGH_VOLTAGE, HIGH_VOLTAGE_LIMIT, 1, V, COUNTRY, Float.NaN));
+ VoltageLevel voltageLevel2 = network.getVoltageLevel(VOLTAGE_LEVEL_2_ID);
+ violations.add(new LimitViolation(voltageLevel2, LimitViolationType.LOW_VOLTAGE, LOW_VOLTAGE_LIMIT, 1, V, COUNTRY, Float.NaN));
+ return violations;
+ }
+
+}
diff --git a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java
index 32bca213..b3fd7983 100644
--- a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java
+++ b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java
@@ -32,6 +32,7 @@
import eu.itesla_project.merge.MergeOptimizerFactory;
import eu.itesla_project.merge.MergeUtil;
import eu.itesla_project.cases.CaseRepository;
+import eu.itesla_project.modules.constraints.ConstraintsModifier;
import eu.itesla_project.modules.contingencies.ContingenciesAndActionsDatabaseClient;
import eu.itesla_project.modules.ddb.DynamicDatabaseClientFactory;
import eu.itesla_project.modules.histo.HistoDbClient;
@@ -69,354 +70,364 @@
*/
public class OnlineWorkflowImpl implements OnlineWorkflow {
- Logger logger = LoggerFactory.getLogger(OnlineWorkflowImpl.class);
+ Logger logger = LoggerFactory.getLogger(OnlineWorkflowImpl.class);
- private final ComputationManager computationManager;
- private final ContingenciesAndActionsDatabaseClient cadbClient;
- private final DynamicDatabaseClientFactory ddbClientFactory;
- private final HistoDbClient histoDbClient;
- private final RulesDbClient rulesDbClient;
- private final ForecastErrorsDataStorage feDataStorage;
+ private final ComputationManager computationManager;
+ private final ContingenciesAndActionsDatabaseClient cadbClient;
+ private final DynamicDatabaseClientFactory ddbClientFactory;
+ private final HistoDbClient histoDbClient;
+ private final RulesDbClient rulesDbClient;
+ private final ForecastErrorsDataStorage feDataStorage;
private final OnlineWorkflowParameters parameters;
- private List listeners=new ArrayList();
+ private List listeners=new ArrayList();
private final CaseRepository caseRepository;
private final WCAFactory wcaFactory;
private final LoadFlowFactory loadFlowFactory;
private final OnlineDb onlineDb;
private final UncertaintiesAnalyserFactory uncertaintiesAnalyserFactory;
private final CorrectiveControlOptimizerFactory optimizerFactory;
- private final SimulatorFactory simulatorFactory;
- private final MontecarloSamplerFactory montecarloSamplerFactory;
- private final MergeOptimizerFactory mergeOptimizerFactory;
- private final RulesFacadeFactory rulesFacadeFactory;
- private final OnlineWorkflowStartParameters startParameters;
-
+ private final SimulatorFactory simulatorFactory;
+ private final MontecarloSamplerFactory montecarloSamplerFactory;
+ private final MergeOptimizerFactory mergeOptimizerFactory;
+ private final RulesFacadeFactory rulesFacadeFactory;
+ private final OnlineWorkflowStartParameters startParameters;
+
private String id;
- public OnlineWorkflowImpl(
- ComputationManager computationManager,
- ContingenciesAndActionsDatabaseClient cadbClient,
- DynamicDatabaseClientFactory ddbClientFactory,
- HistoDbClient histoDbClient,
- RulesDbClient rulesDbClient,
+ public OnlineWorkflowImpl(
+ ComputationManager computationManager,
+ ContingenciesAndActionsDatabaseClient cadbClient,
+ DynamicDatabaseClientFactory ddbClientFactory,
+ HistoDbClient histoDbClient,
+ RulesDbClient rulesDbClient,
WCAFactory wcaFactory,
LoadFlowFactory loadFlowFactory,
- ForecastErrorsDataStorage feDataStorage,
- OnlineDb onlineDB,
+ ForecastErrorsDataStorage feDataStorage,
+ OnlineDb onlineDB,
UncertaintiesAnalyserFactory uncertaintiesAnalyserFactory,
CorrectiveControlOptimizerFactory optimizerFactory,
- SimulatorFactory simulatorFactory,
+ SimulatorFactory simulatorFactory,
CaseRepository caseRepository,
- MontecarloSamplerFactory montecarloSamplerFactory,
- MergeOptimizerFactory mergeOptimizerFactory,
- RulesFacadeFactory rulesFacadeFactory,
+ MontecarloSamplerFactory montecarloSamplerFactory,
+ MergeOptimizerFactory mergeOptimizerFactory,
+ RulesFacadeFactory rulesFacadeFactory,
OnlineWorkflowParameters parameters,
OnlineWorkflowStartParameters startParameters
- ) {
- Objects.requireNonNull(computationManager, "computation manager is null");
- Objects.requireNonNull(cadbClient, "contingencies and actions DB client is null");
- Objects.requireNonNull(ddbClientFactory, "dynamic DB client factory is null");
- Objects.requireNonNull(histoDbClient, "histo DB client is null");
- Objects.requireNonNull(rulesDbClient, "rules DB client is null");
+ ) {
+ Objects.requireNonNull(computationManager, "computation manager is null");
+ Objects.requireNonNull(cadbClient, "contingencies and actions DB client is null");
+ Objects.requireNonNull(ddbClientFactory, "dynamic DB client factory is null");
+ Objects.requireNonNull(histoDbClient, "histo DB client is null");
+ Objects.requireNonNull(rulesDbClient, "rules DB client is null");
Objects.requireNonNull(wcaFactory, "WCA factory is null");
Objects.requireNonNull(loadFlowFactory, "loadFlow factory is null");
- Objects.requireNonNull(feDataStorage, "forecast errors data storage is null");
- Objects.requireNonNull(onlineDB, "online db is null");
- Objects.requireNonNull(optimizerFactory, "corrective control optimizer factory is null");
- Objects.requireNonNull(simulatorFactory, "simulator factory is null");
- Objects.requireNonNull(caseRepository, "case repository is null");
- Objects.requireNonNull(montecarloSamplerFactory, "montecarlo sampler factory is null");
- Objects.requireNonNull(parameters, "parameters is null");
- Objects.requireNonNull(startParameters, "start parameters is null");
- this.computationManager = computationManager;
- this.cadbClient = cadbClient;
- this.ddbClientFactory = ddbClientFactory;
- this.histoDbClient = histoDbClient;
- this.rulesDbClient = rulesDbClient;
+ Objects.requireNonNull(feDataStorage, "forecast errors data storage is null");
+ Objects.requireNonNull(onlineDB, "online db is null");
+ Objects.requireNonNull(optimizerFactory, "corrective control optimizer factory is null");
+ Objects.requireNonNull(simulatorFactory, "simulator factory is null");
+ Objects.requireNonNull(caseRepository, "case repository is null");
+ Objects.requireNonNull(montecarloSamplerFactory, "montecarlo sampler factory is null");
+ Objects.requireNonNull(parameters, "parameters is null");
+ Objects.requireNonNull(startParameters, "start parameters is null");
+ this.computationManager = computationManager;
+ this.cadbClient = cadbClient;
+ this.ddbClientFactory = ddbClientFactory;
+ this.histoDbClient = histoDbClient;
+ this.rulesDbClient = rulesDbClient;
this.wcaFactory = wcaFactory;
this.loadFlowFactory = loadFlowFactory;
- this.feDataStorage = feDataStorage;
- this.onlineDb = onlineDB;
+ this.feDataStorage = feDataStorage;
+ this.onlineDb = onlineDB;
this.uncertaintiesAnalyserFactory = Objects.requireNonNull(uncertaintiesAnalyserFactory);
this.optimizerFactory = optimizerFactory;
this.simulatorFactory = simulatorFactory;
this.caseRepository = caseRepository;
- this.montecarloSamplerFactory = montecarloSamplerFactory;
- this.mergeOptimizerFactory = Objects.requireNonNull(mergeOptimizerFactory);
- this.rulesFacadeFactory = rulesFacadeFactory;
- this.parameters = parameters;
- this.startParameters = startParameters;
- this.id = DateTimeFormat.forPattern("yyyyMMdd_HHmm_").print(parameters.getBaseCaseDate())+new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
- logger.info(parameters.toString());
- }
-
-
- /* (non-Javadoc)
- * @see eu.itesla_project.online.OnlineWorkflowInterface#getId()
- */
- @Override
- public String getId() {
- return id;
- }
-
- /* (non-Javadoc)
- * @see eu.itesla_project.online.OnlineWorkflowInterface#start(eu.itesla_project.online.OnlineWorkflowContext)
- */
- @Override
- public void start(OnlineWorkflowContext oCtx) throws Exception {
- logger.info("{} Online workflow processing, started.",id);
- for (OnlineApplicationListener l :listeners)
- l.onWorkflowUpdate(new StatusSynthesis(id,StatusSynthesis.STATUS_RUNNING));
-
- Network network = MergeUtil.merge(caseRepository, parameters.getBaseCaseDate(), parameters.getCaseType(), parameters.getCountries(),
- loadFlowFactory, 0, mergeOptimizerFactory, computationManager, parameters.isMergeOptimized());
+ this.montecarloSamplerFactory = montecarloSamplerFactory;
+ this.mergeOptimizerFactory = Objects.requireNonNull(mergeOptimizerFactory);
+ this.rulesFacadeFactory = rulesFacadeFactory;
+ this.parameters = parameters;
+ this.startParameters = startParameters;
+ this.id = DateTimeFormat.forPattern("yyyyMMdd_HHmm_").print(parameters.getBaseCaseDate())+new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
+ logger.info(parameters.toString());
+ }
+
+
+ /* (non-Javadoc)
+ * @see eu.itesla_project.online.OnlineWorkflowInterface#getId()
+ */
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ /* (non-Javadoc)
+ * @see eu.itesla_project.online.OnlineWorkflowInterface#start(eu.itesla_project.online.OnlineWorkflowContext)
+ */
+ @Override
+ public void start(OnlineWorkflowContext oCtx) throws Exception {
+ logger.info("{} Online workflow processing, started.",id);
+ for (OnlineApplicationListener l :listeners)
+ l.onWorkflowUpdate(new StatusSynthesis(id,StatusSynthesis.STATUS_RUNNING));
+
+ Network network = MergeUtil.merge(caseRepository, parameters.getBaseCaseDate(), parameters.getCaseType(), parameters.getCountries(),
+ loadFlowFactory, 0, mergeOptimizerFactory, computationManager, parameters.isMergeOptimized());
logger.info("- Network id: " + network.getId());
logger.info("- Network name: "+ network.getName());
- // needed in order to correctly handle multithreading access to network
- network.getStateManager().allowStateMultiThreadAccess(true);
+ // needed in order to correctly handle multithreading access to network
+ network.getStateManager().allowStateMultiThreadAccess(true);
+
-
oCtx.setWorkflowId(id);
- oCtx.setNetwork(network);
+ oCtx.setNetwork(network);
oCtx.setOfflineWorkflowId(parameters.getOfflineWorkflowId());
oCtx.setTimeHorizon(parameters.getTimeHorizon());
-
+
// prepare the objects where the results of the forecast analysis will be saved
oCtx.setResults(new ForecastAnalysisResults(this.getId(), oCtx.getTimeHorizon()));
oCtx.setSecurityRulesResults(new SecurityRulesApplicationResults(this.getId(), oCtx.getTimeHorizon()));
oCtx.setWcaResults(new WCAResults(this.getId(), oCtx.getTimeHorizon()));
if ( parameters.validation() )
- oCtx.setWcaSecurityRulesResults(new SecurityRulesApplicationResults(this.getId(), oCtx.getTimeHorizon()));
+ oCtx.setWcaSecurityRulesResults(new SecurityRulesApplicationResults(this.getId(), oCtx.getTimeHorizon()));
- logger.info(" - WCA processing......");
+ logger.info(" - WCA processing......");
for (OnlineApplicationListener l :listeners)
l.onWcaUpdate(new RunningSynthesis(id,true));
WCAParameters wcaParameters = new WCAParameters(parameters.getHistoInterval(), parameters.getOfflineWorkflowId(), parameters.getSecurityIndexes(), parameters.getRulesPurityThreshold());
- WCA wca = wcaFactory.create(oCtx.getNetwork(), computationManager, histoDbClient, rulesDbClient, uncertaintiesAnalyserFactory, cadbClient, loadFlowFactory);
+ WCA wca = wcaFactory.create(oCtx.getNetwork(), computationManager, histoDbClient, rulesDbClient, uncertaintiesAnalyserFactory, cadbClient, loadFlowFactory);
WCAResult result = wca.run(wcaParameters);
- for (OnlineApplicationListener l :listeners)
- l.onWcaUpdate(new RunningSynthesis(id,false));
+ for (OnlineApplicationListener l :listeners)
+ l.onWcaUpdate(new RunningSynthesis(id,false));
+
+ // ArrayList stables = new ArrayList();
- // ArrayList stables = new ArrayList();
-
for (WCACluster cluster : result.getClusters()) {
- logger.info("WCA: contingency {} in cluster {}", cluster.getContingency().getId(), cluster.getNum().toString());
- oCtx.getWcaResults().addContingencyWithCluster(cluster.getContingency().getId(), cluster);
- if ( parameters.validation() ) { // if validation
- // do not filter out the contingencies
- oCtx.getContingenciesToAnalyze().add(cluster.getContingency());
- } else {
- if ( cluster.getNum() != WCAClusterNum.ONE ) { // cluster 1 -> contingency classified as "stable" -> no need for further analysis
- // contingencies in clusters 2, 3 and 4 need further analysis
- oCtx.getContingenciesToAnalyze().add(cluster.getContingency());
-
- }
- }
+ logger.info("WCA: contingency {} in cluster {}", cluster.getContingency().getId(), cluster.getNum().toString());
+ oCtx.getWcaResults().addContingencyWithCluster(cluster.getContingency().getId(), cluster);
+ if ( parameters.validation() ) { // if validation
+ // do not filter out the contingencies
+ oCtx.getContingenciesToAnalyze().add(cluster.getContingency());
+ } else {
+ if ( cluster.getNum() != WCAClusterNum.ONE ) { // cluster 1 -> contingency classified as "stable" -> no need for further analysis
+ // contingencies in clusters 2, 3 and 4 need further analysis
+ oCtx.getContingenciesToAnalyze().add(cluster.getContingency());
+
+ }
+ }
}
-
+
// notify all contingency stable and unstable
for (OnlineApplicationListener l :listeners)
- l.onWcaContingencies(new WcaContingenciesSynthesis(id, oCtx.getWcaResults().getContingenciesWithClusters()));
-
-
+ l.onWcaContingencies(new WcaContingenciesSynthesis(id, oCtx.getWcaResults().getContingenciesWithClusters()));
+
+
logger.info("{} Online workflow - Analysis of states, started.", id);
- // create modules used in the states analysis
- MontecarloSampler sampler = montecarloSamplerFactory.create(oCtx.getNetwork(), computationManager, feDataStorage);
- OnlineRulesFacade rulesFacade = rulesFacadeFactory.create(rulesDbClient);
- CorrectiveControlOptimizer optimizer = optimizerFactory.create(cadbClient,computationManager);
- Stabilization stabilization = simulatorFactory.createStabilization(oCtx.getNetwork(), computationManager, Integer.MAX_VALUE, ddbClientFactory);
- ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(oCtx.getNetwork(), computationManager, Integer.MAX_VALUE, cadbClient);
-
- // initialize modules
- sampler.init(new MontecarloSamplerParameters(oCtx.getTimeHorizon(), parameters.getFeAnalysisId(), parameters.getStates()));
- rulesFacade.init(new RulesFacadeParameters(oCtx.getOfflineWorkflowId(),
- oCtx.getContingenciesToAnalyze(),
- parameters.getRulesPurityThreshold(),
- parameters.getSecurityIndexes(),
- parameters.validation()));
- Map simulationInitContext = new HashMap<>();
+ // create modules used in the states analysis
+ MontecarloSampler sampler = montecarloSamplerFactory.create(oCtx.getNetwork(), computationManager, feDataStorage);
+ OnlineRulesFacade rulesFacade = rulesFacadeFactory.create(rulesDbClient);
+ CorrectiveControlOptimizer optimizer = optimizerFactory.create(cadbClient,computationManager);
+ Stabilization stabilization = simulatorFactory.createStabilization(oCtx.getNetwork(), computationManager, Integer.MAX_VALUE, ddbClientFactory);
+ ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(oCtx.getNetwork(), computationManager, Integer.MAX_VALUE, cadbClient);
+ LoadFlow loadflow = loadFlowFactory.create(oCtx.getNetwork(), computationManager, 0);
+ ConstraintsModifier constraintsModifier = new ConstraintsModifier(oCtx.getNetwork());
+ StateAnalizerListener stateListener = new StateAnalizerListener();
+
+ // initialize modules
+ rulesFacade.init(new RulesFacadeParameters(oCtx.getOfflineWorkflowId(),
+ oCtx.getContingenciesToAnalyze(),
+ parameters.getRulesPurityThreshold(),
+ parameters.getSecurityIndexes(),
+ parameters.validation(),
+ parameters.isHandleViolationsInN()));
+ Map simulationInitContext = new HashMap<>();
SimulationParameters simulationParameters = SimulationParameters.load();
stabilization.init(simulationParameters, simulationInitContext);
impactAnalysis.init(simulationParameters, simulationInitContext);
- optimizer.init(new CorrectiveControlOptimizerParameters());
-
- LoadFlow loadflow = loadFlowFactory.create(oCtx.getNetwork(), computationManager, 0);
- StateAnalizerListener stateListener = new StateAnalizerListener();
- // run states analysis
- List> tasks = new ArrayList<>(parameters.getStates());
- for ( int i=0; i> tasks = new ArrayList<>(statesNumber);
+ for ( int i=0; i statusMap = new HashMap();
WorkSynthesis work=new WorkSynthesis(id,statusMap);
ContingencyStatesActionsSynthesis acts=new ContingencyStatesActionsSynthesis(id);
- ContingencyStatesIndexesSynthesis stindex=new ContingencyStatesIndexesSynthesis(id);
- IndexSecurityRulesResultsSynthesis stateWithSecRulesResults = new IndexSecurityRulesResultsSynthesis(id);
-
+ ContingencyStatesIndexesSynthesis stindex=new ContingencyStatesIndexesSynthesis(id);
+ IndexSecurityRulesResultsSynthesis stateWithSecRulesResults = new IndexSecurityRulesResultsSynthesis(id);
+
public void onUpdate(Integer stateId, EnumMap status, TimeHorizon t) {
//statusMap.put(stateId, new WorkStatus(stateId, status, t.toString()));
-
- if (statusMap.containsKey(stateId)) {
- WorkStatus ws = statusMap.get(stateId);
- ws.setStatus(status);
- ws.setTimeHorizon(t.toString());
- statusMap.put(stateId,ws);
- } else
- statusMap.put(stateId,new WorkStatus(stateId, status,t.toString() ));
+
+ if (statusMap.containsKey(stateId)) {
+ WorkStatus ws = statusMap.get(stateId);
+ ws.setStatus(status);
+ ws.setTimeHorizon(t.toString());
+ statusMap.put(stateId,ws);
+ } else
+ statusMap.put(stateId,new WorkStatus(stateId, status,t.toString() ));
for (OnlineApplicationListener l : listeners)
l.onWorkflowStateUpdate(work);
}
-
- public void onSecurityRulesApplicationResults(String contingencyId, Integer stateId, OnlineWorkflowContext oCtx )
+
+ public void onSecurityRulesApplicationResults(String contingencyId, Integer stateId, OnlineWorkflowContext oCtx )
{
-
- SecurityRulesApplicationResults rulesApplicationResults = oCtx.getSecurityRulesResults();
- stateWithSecRulesResults.addStateSecurityRuleIndexes(contingencyId, stateId, rulesApplicationResults);
- for (OnlineApplicationListener l : listeners)
+
+ SecurityRulesApplicationResults rulesApplicationResults = oCtx.getSecurityRulesResults();
+ stateWithSecRulesResults.addStateSecurityRuleIndexes(contingencyId, stateId, rulesApplicationResults);
+ for (OnlineApplicationListener l : listeners)
l.onStatesWithSecurityRulesResultsUpdate(stateWithSecRulesResults);
}
-
+
public void onUpdate(Integer stateId, EnumMap status, TimeHorizon t, String detail) {
- // statusMap.put(stateId, new WorkStatus(stateId, status, t.toString(),detail));
- if (statusMap.containsKey(stateId)) {
- WorkStatus ws = statusMap.get(stateId);
- StringBuffer sb = new StringBuffer();
-
- if (ws.getDetail() != null && !ws.getDetail().equals(""))
- sb.append(ws.getDetail()).append("
").append(detail);
- else
- sb.append(detail);
-
- ws.setDetail(sb.toString());
- ws.setStatus(status);
- ws.setTimeHorizon(t.toString());
- statusMap.put(stateId,ws);
- } else
- statusMap.put(stateId,new WorkStatus(stateId, status,t.toString(),detail ));
-
+ // statusMap.put(stateId, new WorkStatus(stateId, status, t.toString(),detail));
+ if (statusMap.containsKey(stateId)) {
+ WorkStatus ws = statusMap.get(stateId);
+ StringBuffer sb = new StringBuffer();
+
+ if (ws.getDetail() != null && !ws.getDetail().equals(""))
+ sb.append(ws.getDetail()).append("
").append(detail);
+ else
+ sb.append(detail);
+
+ ws.setDetail(sb.toString());
+ ws.setStatus(status);
+ ws.setTimeHorizon(t.toString());
+ statusMap.put(stateId,ws);
+ } else
+ statusMap.put(stateId,new WorkStatus(stateId, status,t.toString(),detail ));
+
for (OnlineApplicationListener l : listeners)
l.onWorkflowStateUpdate(work);
}
-
-
- public void onImpactAnalysisResults(Integer stateId, OnlineWorkflowContext oCtx ) {
-
- ForecastAnalysisResults res = oCtx.getResults();
- Collection unsafes= res.getUnsafeContingencies();
- for(String c :unsafes)
- {
-
- List sts =res.getUnstableStates(c);
- for(Integer s:sts)
- {
- List sec=res.getIndexes(c, s);
- ArrayList indexes=new ArrayList();
- for(SecurityIndex idx: sec)
- {
- indexes.add(stindex.new SecurityIndexInfo(idx));
- }
- stindex.addStateIndexes(c,s, indexes);
- }
-
- }
-
- for (OnlineApplicationListener l :listeners)
- l.onStatesWithIndexesUpdate(stindex);
-
-
- }
-
- public void onOptimizerResults(Integer stateId, OnlineWorkflowContext oCtx ) {
-
- ForecastAnalysisResults res = oCtx.getResults();
- Collection conts =res.getContingenciesWithActions();
-
-
- for(String c :conts)
- {
- Map unsafeStatesWithActions = res.getUnsafeStatesWithActions(c);
- if ( unsafeStatesWithActions != null ) {
- Set sts =unsafeStatesWithActions.keySet();
- for(Integer s:sts)
- {
- List actiondIds = res.getActionsIds(c, s);
- if ( actiondIds != null ) {
- ArrayList infos=new ArrayList();
- for(String a : actiondIds)
- infos.add(new ActionInfo(a));
- acts.addStateActions(c,s, infos);
- }
- }
- }
- }
- for (OnlineApplicationListener l :listeners)
- l.onStatesWithActionsUpdate(acts);
-
-
- }
+
+
+ public void onImpactAnalysisResults(Integer stateId, OnlineWorkflowContext oCtx ) {
+
+ ForecastAnalysisResults res = oCtx.getResults();
+ Collection unsafes= res.getUnsafeContingencies();
+ for(String c :unsafes)
+ {
+
+ List sts =res.getUnstableStates(c);
+ for(Integer s:sts)
+ {
+ List sec=res.getIndexes(c, s);
+ ArrayList indexes=new ArrayList();
+ for(SecurityIndex idx: sec)
+ {
+ indexes.add(stindex.new SecurityIndexInfo(idx));
+ }
+ stindex.addStateIndexes(c,s, indexes);
+ }
+
+ }
+
+ for (OnlineApplicationListener l :listeners)
+ l.onStatesWithIndexesUpdate(stindex);
+
+
+ }
+
+ public void onOptimizerResults(Integer stateId, OnlineWorkflowContext oCtx ) {
+
+ ForecastAnalysisResults res = oCtx.getResults();
+ Collection conts =res.getContingenciesWithActions();
+
+
+ for(String c :conts)
+ {
+ Map unsafeStatesWithActions = res.getUnsafeStatesWithActions(c);
+ if ( unsafeStatesWithActions != null ) {
+ Set sts =unsafeStatesWithActions.keySet();
+ for(Integer s:sts)
+ {
+ List actiondIds = res.getActionsIds(c, s);
+ if ( actiondIds != null ) {
+ ArrayList infos=new ArrayList();
+ for(String a : actiondIds)
+ infos.add(new ActionInfo(a));
+ acts.addStateActions(c,s, infos);
+ }
+ }
+ }
+ }
+ for (OnlineApplicationListener l :listeners)
+ l.onStatesWithActionsUpdate(acts);
+
+
+ }
}
}
diff --git a/online-workflow/src/main/java/eu/itesla_project/online/SecurityRulesApplicationResults.java b/online-workflow/src/main/java/eu/itesla_project/online/SecurityRulesApplicationResults.java
index ab23ba1b..935d8fdb 100644
--- a/online-workflow/src/main/java/eu/itesla_project/online/SecurityRulesApplicationResults.java
+++ b/online-workflow/src/main/java/eu/itesla_project/online/SecurityRulesApplicationResults.java
@@ -1,5 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -22,139 +23,170 @@
* @author Quinary
*/
public class SecurityRulesApplicationResults implements OnlineWorkflowRulesResults {
-
- // the id of the workflow where the rules have been applied
- private final String workflowId;
- // the time horizon used for the workflow where the rules have been applied
- private final TimeHorizon timeHorizon;
-
- // >
- private Map contingenciesWithSecurityRulesResults = new HashMap();
-
- public SecurityRulesApplicationResults(String workflowId, TimeHorizon timeHorizon) {
- this.workflowId = workflowId;
- this.timeHorizon = timeHorizon;
- }
-
- @Override
- public String getWorkflowId() {
- return workflowId;
- }
-
- @Override
- public TimeHorizon getTimeHorizon() {
- return timeHorizon;
- }
-
-
- public void addStateWithSecurityRulesResults(String contingencyId, Integer stateId, StateStatus stateStatus, Map securityRulesResults) {
- StatesWithSecurityRulesResults statesWithSecurityRulesResults = new StatesWithSecurityRulesResults();
- if ( contingenciesWithSecurityRulesResults.containsKey(contingencyId) ) {
- statesWithSecurityRulesResults = contingenciesWithSecurityRulesResults.get(contingencyId);
- }
- statesWithSecurityRulesResults.addState(stateId, stateStatus, securityRulesResults);
- contingenciesWithSecurityRulesResults.put(contingencyId, statesWithSecurityRulesResults);
-
- }
-
- @Override
- public Set getContingenciesWithSecurityRulesResults() {
- return contingenciesWithSecurityRulesResults.keySet();
- }
-
- public List getStatesWithSecurityRulesResults(String contingencyId) {
- List statesWithSecurityRulesResults = new ArrayList();
- for(Integer stateId : contingenciesWithSecurityRulesResults.get(contingencyId).getStates()) {
- statesWithSecurityRulesResults.add(stateId);
- }
- return statesWithSecurityRulesResults;
- }
-
- public Map getSecurityRulesResults(String contingencyId, Integer stateId) {
- return contingenciesWithSecurityRulesResults.get(contingencyId).getSecurityRulesResults(stateId);
- }
-
- @Override
- public Map getStateResults(String contingencyId, Integer stateId) {
- Map securityRulesResults = getSecurityRulesResults(contingencyId, stateId);
- Map stateResults = new HashMap();
- for(SecurityIndexType index : securityRulesResults.keySet()) {
- switch (securityRulesResults.get(index)) {
- case SAFE:
- stateResults.put(index.getLabel(), true);
- break;
- case UNSAFE:
- stateResults.put(index.getLabel(), false);
- break;
- default:
- break;
- }
- }
- return stateResults;
- }
-
- @Override
- public StateStatus getStateStatus(String contingencyId, Integer stateId) {
- return contingenciesWithSecurityRulesResults.get(contingencyId).getStateStatus(stateId);
- }
-
- private StatesWithSecurityRulesResults getStatesWithSecurityRulesResults_int(String contingencyId) {
- return contingenciesWithSecurityRulesResults.get(contingencyId);
- }
-
- public String toString() {
- String output = "time horizon: "+ timeHorizon.getName();
- output += "\n" + "contingencies with security rules results: " + getContingenciesWithSecurityRulesResults();
- for(String contingencyId : getContingenciesWithSecurityRulesResults() )
- output += "\n[contingecy id = " + contingencyId + ", states = " + getStatesWithSecurityRulesResults_int(contingencyId) + "]";
- return output;
- }
-
- private class StatesWithSecurityRulesResults {
-
- //
- Map> states = new HashMap>();
- //
- Map statesStatus = new HashMap();
-
- public boolean addState(Integer stateId, StateStatus stateStatus, Map securityRulesResults) {
- boolean added = false;
- if ( !states.containsKey(stateId) && !statesStatus.containsKey(stateId) ) {
- states.put(stateId, securityRulesResults);
- statesStatus.put(stateId, stateStatus);
- added = true;
- }
- return added;
- }
-
- public Set getStates() {
- return states.keySet();
- }
-
- public Map getSecurityRulesResults(Integer stateId) {
- return states.get(stateId);
- }
-
- public StateStatus getStateStatus(Integer stateId) {
- return statesStatus.get(stateId);
- }
-
- public String toString() {
- String output = "";
- for(Integer stateId : getStates()) {
- output += "[stateId " + stateId + ", " + getStateStatus(stateId) + ", rules = " + securityRulesResultsToString(getSecurityRulesResults(stateId)) + "]";
- }
- return output;
- }
-
- private String securityRulesResultsToString(Map securityRulesResults) {
- String results = "[";
- for (SecurityIndexType securityIndexType : securityRulesResults.keySet()) {
- results += "[" + securityIndexType.getLabel().replaceAll(" ", "_") + " " + securityRulesResults.get(securityIndexType) + "]";
- }
- results += " ]";
- return results;
- }
- }
+
+ // the id of the workflow where the rules have been applied
+ private final String workflowId;
+ // the time horizon used for the workflow where the rules have been applied
+ private final TimeHorizon timeHorizon;
+
+ // >
+ private Map contingenciesWithSecurityRulesResults = new HashMap();
+
+ public SecurityRulesApplicationResults(String workflowId, TimeHorizon timeHorizon) {
+ this.workflowId = workflowId;
+ this.timeHorizon = timeHorizon;
+ }
+
+ @Override
+ public String getWorkflowId() {
+ return workflowId;
+ }
+
+ @Override
+ public TimeHorizon getTimeHorizon() {
+ return timeHorizon;
+ }
+
+
+ public void addStateWithSecurityRulesResults(String contingencyId, Integer stateId, StateStatus stateStatus, Map securityRulesResults,
+ boolean rulesAvailable, List invalidRules) {
+ StatesWithSecurityRulesResults statesWithSecurityRulesResults = new StatesWithSecurityRulesResults();
+ if ( contingenciesWithSecurityRulesResults.containsKey(contingencyId) ) {
+ statesWithSecurityRulesResults = contingenciesWithSecurityRulesResults.get(contingencyId);
+ }
+ statesWithSecurityRulesResults.addState(stateId, stateStatus, securityRulesResults, rulesAvailable, invalidRules);
+ contingenciesWithSecurityRulesResults.put(contingencyId, statesWithSecurityRulesResults);
+ }
+
+ @Override
+ public Set getContingenciesWithSecurityRulesResults() {
+ return contingenciesWithSecurityRulesResults.keySet();
+ }
+
+ public List getStatesWithSecurityRulesResults(String contingencyId) {
+ List statesWithSecurityRulesResults = new ArrayList();
+ for(Integer stateId : contingenciesWithSecurityRulesResults.get(contingencyId).getStates()) {
+ statesWithSecurityRulesResults.add(stateId);
+ }
+ return statesWithSecurityRulesResults;
+ }
+
+ public Map getSecurityRulesResults(String contingencyId, Integer stateId) {
+ return contingenciesWithSecurityRulesResults.get(contingencyId).getSecurityRulesResults(stateId);
+ }
+
+ @Override
+ public Map getStateResults(String contingencyId, Integer stateId) {
+ Map securityRulesResults = getSecurityRulesResults(contingencyId, stateId);
+ Map stateResults = new HashMap();
+ for(SecurityIndexType index : securityRulesResults.keySet()) {
+ switch (securityRulesResults.get(index)) {
+ case SAFE:
+ stateResults.put(index.getLabel(), true);
+ break;
+ case UNSAFE:
+ stateResults.put(index.getLabel(), false);
+ break;
+ default:
+ break;
+ }
+ }
+ return stateResults;
+ }
+
+ @Override
+ public StateStatus getStateStatus(String contingencyId, Integer stateId) {
+ return contingenciesWithSecurityRulesResults.get(contingencyId).getStateStatus(stateId);
+ }
+
+ private StatesWithSecurityRulesResults getStatesWithSecurityRulesResults_int(String contingencyId) {
+ return contingenciesWithSecurityRulesResults.get(contingencyId);
+ }
+
+ @Override
+ public boolean areValidRulesAvailable(String contingencyId, Integer stateId) {
+ boolean rulesAvailable = false;
+ if ( contingenciesWithSecurityRulesResults.containsKey(contingencyId) )
+ rulesAvailable = contingenciesWithSecurityRulesResults.get(contingencyId).getRulesAvailability(stateId);
+ return rulesAvailable;
+ }
+
+ @Override
+ public List getInvalidRules(String contingencyId, Integer stateId) {
+ List invalidRules = new ArrayList();
+ if ( contingenciesWithSecurityRulesResults.containsKey(contingencyId) )
+ invalidRules = contingenciesWithSecurityRulesResults.get(contingencyId).getInvalidRules(stateId);
+ return invalidRules;
+ }
+
+ public String toString() {
+ String output = "time horizon: "+ timeHorizon.getName();
+ output += "\n" + "contingencies with security rules results: " + getContingenciesWithSecurityRulesResults();
+ for(String contingencyId : getContingenciesWithSecurityRulesResults() )
+ output += "\n[contingecy id = " + contingencyId + ", states = " + getStatesWithSecurityRulesResults_int(contingencyId) + "]";
+ return output;
+ }
+
+ private class StatesWithSecurityRulesResults {
+
+ //
+ Map> states = new HashMap>();
+ //
+ Map statesStatus = new HashMap();
+ //
+ Map statesWithRules = new HashMap();
+ //
+ Map> statesWithInvalidRules = new HashMap>();
+
+ boolean addState(Integer stateId, StateStatus stateStatus, Map securityRulesResults,
+ Boolean rulesAvailable, List invalidRules) {
+ boolean added = false;
+ if ( !states.containsKey(stateId) && !statesStatus.containsKey(stateId) ) {
+ states.put(stateId, securityRulesResults);
+ statesStatus.put(stateId, stateStatus);
+ statesWithRules.put(stateId, rulesAvailable);
+ statesWithInvalidRules.put(stateId, invalidRules);
+ added = true;
+ }
+ return added;
+ }
+
+ Set getStates() {
+ return states.keySet();
+ }
+
+ Map getSecurityRulesResults(Integer stateId) {
+ return states.get(stateId);
+ }
+
+ StateStatus getStateStatus(Integer stateId) {
+ return statesStatus.get(stateId);
+ }
+
+ boolean getRulesAvailability(Integer stateId) {
+ return statesWithRules.get(stateId);
+ }
+
+ List getInvalidRules(Integer stateId) {
+ return statesWithInvalidRules.get(stateId);
+ }
+
+ public String toString() {
+ String output = "";
+ for(Integer stateId : getStates()) {
+ output += "[stateId " + stateId + ", " + getStateStatus(stateId) + ", rules = " + securityRulesResultsToString(getSecurityRulesResults(stateId)) + "]";
+ }
+ return output;
+ }
+
+ private String securityRulesResultsToString(Map securityRulesResults) {
+ String results = "[";
+ for (SecurityIndexType securityIndexType : securityRulesResults.keySet()) {
+ results += "[" + securityIndexType.getLabel().replaceAll(" ", "_") + " " + securityRulesResults.get(securityIndexType) + "]";
+ }
+ results += " ]";
+ return results;
+ }
+ }
}
diff --git a/online-workflow/src/main/java/eu/itesla_project/online/StateAnalyzer.java b/online-workflow/src/main/java/eu/itesla_project/online/StateAnalyzer.java
index 02cf8d64..ad9240be 100644
--- a/online-workflow/src/main/java/eu/itesla_project/online/StateAnalyzer.java
+++ b/online-workflow/src/main/java/eu/itesla_project/online/StateAnalyzer.java
@@ -30,6 +30,7 @@
import eu.itesla_project.iidm.network.StateManager;
import eu.itesla_project.loadflow.api.LoadFlow;
import eu.itesla_project.loadflow.api.LoadFlowResult;
+import eu.itesla_project.modules.constraints.ConstraintsModifier;
import eu.itesla_project.modules.contingencies.ActionParameters;
import eu.itesla_project.modules.contingencies.Contingency;
import eu.itesla_project.modules.mcla.MontecarloSampler;
@@ -60,193 +61,217 @@
*/
public class StateAnalyzer implements Callable {
- Logger logger = LoggerFactory.getLogger(StateAnalyzer.class);
+ Logger logger = LoggerFactory.getLogger(StateAnalyzer.class);
- private OnlineWorkflowContext context;
- private MontecarloSampler sampler;
- private LoadFlow loadFlow;
- private OnlineRulesFacade rulesFacade;
- private CorrectiveControlOptimizer optimizer;
- private Stabilization stabilization;
- private ImpactAnalysis impactAnalysis;
- private OnlineDb onlineDb;
- private Integer stateId;
- private OnlineWorkflowParameters parameters;
- private StateAnalizerListener stateListener;
- private EnumMap status=new EnumMap(OnlineTaskType.class);
- Map loadflowResults = new HashMap();
+ private OnlineWorkflowContext context;
+ private MontecarloSampler sampler;
+ private LoadFlow loadFlow;
+ private OnlineRulesFacade rulesFacade;
+ private CorrectiveControlOptimizer optimizer;
+ private Stabilization stabilization;
+ private ImpactAnalysis impactAnalysis;
+ private OnlineDb onlineDb;
+ private Integer stateId;
+ private OnlineWorkflowParameters parameters;
+ private StateAnalizerListener stateListener;
+ private EnumMap status=new EnumMap(OnlineTaskType.class);
+ Map loadflowResults = new HashMap();
+ private ConstraintsModifier constraintsModifier;
- public StateAnalyzer(OnlineWorkflowContext context, MontecarloSampler sampler, LoadFlow loadFlow,
- OnlineRulesFacade rulesFacade, CorrectiveControlOptimizer optimizer, Stabilization stabilization,
- ImpactAnalysis impactAnalysis, OnlineDb onlineDb, StateAnalizerListener stateListener, OnlineWorkflowParameters parameters) {
- this.context = context;
- this.sampler = sampler;
- this.loadFlow = loadFlow;
- this.rulesFacade = rulesFacade;
- this.optimizer = optimizer;
- this.stabilization = stabilization;
- this.impactAnalysis = impactAnalysis;
- this.onlineDb = onlineDb;
- this.stateListener=stateListener;
- this.parameters = parameters;
- //stateId = "STATE-" + context.incrementStateCounter();
- stateId = context.incrementStateCounter();
- initStatus();
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- }
+ public StateAnalyzer(OnlineWorkflowContext context, MontecarloSampler sampler, LoadFlow loadFlow,
+ OnlineRulesFacade rulesFacade, CorrectiveControlOptimizer optimizer, Stabilization stabilization,
+ ImpactAnalysis impactAnalysis, OnlineDb onlineDb, StateAnalizerListener stateListener, ConstraintsModifier constraintsModifier,
+ OnlineWorkflowParameters parameters) {
+ this.context = context;
+ this.sampler = sampler;
+ this.loadFlow = loadFlow;
+ this.rulesFacade = rulesFacade;
+ this.optimizer = optimizer;
+ this.stabilization = stabilization;
+ this.impactAnalysis = impactAnalysis;
+ this.onlineDb = onlineDb;
+ this.stateListener=stateListener;
+ this.constraintsModifier = constraintsModifier;
+ this.parameters = parameters;
+ //stateId = "STATE-" + context.incrementStateCounter();
+ stateId = context.incrementStateCounter();
+ initStatus();
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ }
- private void initStatus(){
- status.put(OnlineTaskType.SAMPLING, OnlineTaskStatus.IDLE);
- status.put(OnlineTaskType.LOAD_FLOW, OnlineTaskStatus.IDLE);
- status.put(OnlineTaskType.SECURITY_RULES, OnlineTaskStatus.IDLE);
- status.put(OnlineTaskType.OPTIMIZER, OnlineTaskStatus.IDLE);
- status.put(OnlineTaskType.TIME_DOMAIN_SIM, OnlineTaskStatus.IDLE);
- }
-
- @Override
- public Void call() throws Exception {
- OnlineTaskType currentStatus=OnlineTaskType.SAMPLING;
-
- try {
- // create new state
- logger.info("Analyzing state {}", stateId);
- String stateIdStr=String.valueOf(stateId);
- context.getNetwork().getStateManager().cloneState(StateManager.INITIAL_STATE_ID, stateIdStr);
- context.getNetwork().getStateManager().setWorkingState(stateIdStr);
- // sample
- logger.info("{}: sampling started", stateId);
- status.put(currentStatus, OnlineTaskStatus.RUNNING);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- if ( !parameters.analyseBasecase() || stateId > 0 )
- sampler.sample();
- else
- logger.info("{}: state = basecase", stateId);
- status.put(currentStatus, OnlineTaskStatus.SUCCESS);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- logger.info("{}: sampling terminated", stateId);
-
- // complete state with loadflow
- currentStatus=OnlineTaskType.LOAD_FLOW;
- status.put(currentStatus, OnlineTaskStatus.RUNNING);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- logger.info("{}: loadflow started", stateId);
+ private void initStatus(){
+ status.put(OnlineTaskType.SAMPLING, OnlineTaskStatus.IDLE);
+ status.put(OnlineTaskType.LOAD_FLOW, OnlineTaskStatus.IDLE);
+ status.put(OnlineTaskType.SECURITY_RULES, OnlineTaskStatus.IDLE);
+ status.put(OnlineTaskType.OPTIMIZER, OnlineTaskStatus.IDLE);
+ status.put(OnlineTaskType.TIME_DOMAIN_SIM, OnlineTaskStatus.IDLE);
+ }
+
+ @Override
+ public Void call() throws Exception {
+ OnlineTaskType currentStatus=OnlineTaskType.SAMPLING;
+
+ try {
+ // create new state
+ logger.info("Analyzing state {}", stateId);
+ String stateIdStr=String.valueOf(stateId);
+ context.getNetwork().getStateManager().cloneState(StateManager.INITIAL_STATE_ID, stateIdStr);
+ context.getNetwork().getStateManager().setWorkingState(stateIdStr);
+ // sample
+ logger.info("{}: sampling started", stateId);
+ status.put(currentStatus, OnlineTaskStatus.RUNNING);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ if ( !parameters.analyseBasecase() || stateId > 0 )
+ sampler.sample();
+ else
+ logger.info("{}: state = basecase", stateId);
+ status.put(currentStatus, OnlineTaskStatus.SUCCESS);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ logger.info("{}: sampling terminated", stateId);
+
+ // complete state with loadflow
+ currentStatus=OnlineTaskType.LOAD_FLOW;
+ status.put(currentStatus, OnlineTaskStatus.RUNNING);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ logger.info("{}: loadflow started", stateId);
LoadFlowResult result = loadFlow.run();
- status.put(currentStatus, result.isOk()?OnlineTaskStatus.SUCCESS:OnlineTaskStatus.FAILED);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- logger.info("{}: loadflow terminated", stateId);
- if ( result.getMetrics() != null ) {
- logger.info("{}: loadflow metrics: {}", stateId, result.getMetrics());
- if ( !result.getMetrics().isEmpty() )
- onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.LOAD_FLOW, result.getMetrics());
- }
- status.put(currentStatus, result.isOk()?OnlineTaskStatus.SUCCESS:OnlineTaskStatus.FAILED);
-
- if ( parameters.storeStates() ) {
- logger.info("{}: storing state in online db", stateId);
- onlineDb.storeState(context.getWorkflowId(), stateId, context.getNetwork());
- }
-
- if ( result.isOk() ) {
- // stores violations only if loadflow converges
- logger.info("{}: storing violations after {} in online db", stateId, OnlineStep.LOAD_FLOW);
- List violations = Security.checkLimits(context.getNetwork(), CurrentLimitType.PATL, Integer.MAX_VALUE, parameters.getLimitReduction());
- if ( violations != null && !violations.isEmpty() )
- onlineDb.storeViolations(context.getWorkflowId(), stateId, OnlineStep.LOAD_FLOW, violations);
- else
- logger.info("{}: no violations after {}", stateId, OnlineStep.LOAD_FLOW);
+ status.put(currentStatus, result.isOk()?OnlineTaskStatus.SUCCESS:OnlineTaskStatus.FAILED);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ logger.info("{}: loadflow terminated", stateId);
+ if ( result.getMetrics() != null ) {
+ logger.info("{}: loadflow metrics: {}", stateId, result.getMetrics());
+ if ( !result.getMetrics().isEmpty() )
+ onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.LOAD_FLOW, result.getMetrics());
+ }
+ status.put(currentStatus, result.isOk()?OnlineTaskStatus.SUCCESS:OnlineTaskStatus.FAILED);
+
+ if ( parameters.storeStates() ) {
+ logger.info("{}: storing state in online db", stateId);
+ onlineDb.storeState(context.getWorkflowId(), stateId, context.getNetwork());
+ }
-
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- // check state against contingencies
- boolean isStateSafe = true;
- List contingenciesForOptimizer = new ArrayList();
- List contingenciesForSimulator = new ArrayList();
- currentStatus=OnlineTaskType.SECURITY_RULES;
- status.put(currentStatus, OnlineTaskStatus.RUNNING);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
-
- for (Contingency contingency : context.getContingenciesToAnalyze()) {
- logger.info("{}: check security rules against contingency {}", stateId, contingency.getId());
- RulesFacadeResults rulesResults = rulesFacade.evaluate(contingency, context.getNetwork());
- if ( rulesResults.getStateStatus() == StateStatus.SAFE ) { // check if this contingency is ok
- logger.info("{}: is safe for contingency {}", stateId, contingency.getId());
- if ( parameters.validation() ) { // if validation
- // send all [contingency,state] pairs to simulation
- contingenciesForSimulator.add(contingency);
- // send safe [contingency,state] pairs to optimizer
- contingenciesForOptimizer.add(contingency);
- }
- } else if( rulesResults.getStateStatus() == StateStatus.SAFE_WITH_CORRECTIVE_ACTIONS ) { // check if this contingency could be ok with corrective actions
- logger.info("{}: requires corrective actions for contingency {}", stateId, contingency.getId());
- isStateSafe = false;
- contingenciesForOptimizer.add(contingency);
- if ( parameters.validation() ) { // if validation
- // send all [contingency,state] pairs to simulation
- contingenciesForSimulator.add(contingency);
- }
- } else { // we need to perform a time-domain simulation on this state for this contingency
- logger.info("{}: requires time-domain simulation for contingency {}", stateId, contingency.getId());
- isStateSafe = false;
- contingenciesForSimulator.add(contingency);
- }
+ if ( result.isOk() ) {
+ // stores violations only if loadflow converges
+ logger.info("{}: storing violations after {} in online db", stateId, OnlineStep.LOAD_FLOW);
+ List violations = Security.checkLimits(context.getNetwork(),
+ CurrentLimitType.PATL,
+ Integer.MAX_VALUE,
+ parameters.getLimitReduction());
+ if ( violations != null && !violations.isEmpty() ) {
+ onlineDb.storeViolations(context.getWorkflowId(), stateId, OnlineStep.LOAD_FLOW, violations);
+ if ( parameters.isHandleViolationsInN() ) {
+ if ( parameters.analyseBasecase() && stateId == 0 ) {
+ constraintsModifier.looseConstraints(stateIdStr,
+ violations,
+ parameters.getConstraintMargin(),
+ true); // loose constraints on state 0 (=basecase)
+ } else {
+ constraintsModifier.looseConstraints(stateIdStr, violations); // loose constraints on sampled state
+ }
+ }
+ } else
+ logger.info("{}: no violations after {}", stateId, OnlineStep.LOAD_FLOW);
- synchronized (context.getSecurityRulesResults()) {
- context.getSecurityRulesResults().addStateWithSecurityRulesResults(contingency.getId(), stateId, rulesResults.getStateStatus(), rulesResults.getIndexesResults());
- stateListener.onSecurityRulesApplicationResults(contingency.getId(),stateId, context);
- }
-
- if ( parameters.validation() ) {
- RulesFacadeResults wcaRulesResults = rulesFacade.wcaEvaluate(contingency, context.getNetwork());
- synchronized (context.getWcaSecurityRulesResults()) {
- context.getWcaSecurityRulesResults().addStateWithSecurityRulesResults(contingency.getId(), stateId, wcaRulesResults.getStateStatus(), wcaRulesResults.getIndexesResults());
- }
- }
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ // check state against contingencies
+ boolean isStateSafe = true;
+ List contingenciesForOptimizer = new ArrayList();
+ List contingenciesForSimulator = new ArrayList();
+ currentStatus=OnlineTaskType.SECURITY_RULES;
+ status.put(currentStatus, OnlineTaskStatus.RUNNING);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+
+ for (Contingency contingency : context.getContingenciesToAnalyze()) {
+ logger.info("{}: check security rules against contingency {}", stateId, contingency.getId());
+ RulesFacadeResults rulesResults = rulesFacade.evaluate(contingency, context.getNetwork());
+ if ( rulesResults.areRulesAvailable() ) {
+ if ( rulesResults.getStateStatus() == StateStatus.SAFE ) { // check if this contingency is ok
+ logger.info("{}: is safe for contingency {}", stateId, contingency.getId());
+ if ( parameters.validation() ) { // if validation
+ // send all [contingency,state] pairs to simulation
+ contingenciesForSimulator.add(contingency);
+ // send safe [contingency,state] pairs to optimizer
+ contingenciesForOptimizer.add(contingency);
+ }
+ } else if( rulesResults.getStateStatus() == StateStatus.SAFE_WITH_CORRECTIVE_ACTIONS ) { // check if this contingency could be ok with corrective actions
+ logger.info("{}: requires corrective actions for contingency {}", stateId, contingency.getId());
+ isStateSafe = false;
+ contingenciesForOptimizer.add(contingency);
+ if ( parameters.validation() ) { // if validation
+ // send all [contingency,state] pairs to simulation
+ contingenciesForSimulator.add(contingency);
+ }
+ } else { // we need to perform a time-domain simulation on this state for this contingency
+ logger.info("{}: requires time-domain simulation for contingency {}", stateId, contingency.getId());
+ isStateSafe = false;
+ contingenciesForSimulator.add(contingency);
+ }
+ } else {
+ logger.warn("{}: no valid rules for contingency {}", stateId, contingency.getId());
+ contingenciesForSimulator.add(contingency);
+ }
+
+ synchronized (context.getSecurityRulesResults()) {
+ context.getSecurityRulesResults().addStateWithSecurityRulesResults(contingency.getId(), stateId, rulesResults.getStateStatus(),
+ rulesResults.getIndexesResults(), rulesResults.areRulesAvailable(),
+ rulesResults.getInvalidRules());
+ stateListener.onSecurityRulesApplicationResults(contingency.getId(),stateId, context);
+ }
+
+ if ( parameters.validation() ) {
+ RulesFacadeResults wcaRulesResults = rulesFacade.wcaEvaluate(contingency, context.getNetwork());
+ synchronized (context.getWcaSecurityRulesResults()) {
+ context.getWcaSecurityRulesResults().addStateWithSecurityRulesResults(contingency.getId(), stateId, wcaRulesResults.getStateStatus(),
+ wcaRulesResults.getIndexesResults(), wcaRulesResults.areRulesAvailable(),
+ wcaRulesResults.getInvalidRules());
+ }
+ }
}
- status.put(currentStatus, OnlineTaskStatus.SUCCESS);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- computeAndStorePostContingencyViolations(context.getNetwork(), context.getContingenciesToAnalyze());
- if ( isStateSafe && !parameters.validation() ) {
- // state is safe: stop analysis and destroy the state
- logger.info("{}: is safe for every contingency: stopping analysis", stateId);
- //context.getNetwork().getStateManager().removeState(stateIdStr); // the state is still needed
- return null;
- } else {
- if ( contingenciesForOptimizer.size() > 0 ) {
- // perform corrective control optimization
- currentStatus=OnlineTaskType.OPTIMIZER;
- status.put(currentStatus, OnlineTaskStatus.RUNNING);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- logger.info("{}: corrective control optimization started - working on {} contingencies", stateId, contingenciesForOptimizer.size());
- runOptimizer(context.getNetwork(), contingenciesForOptimizer, contingenciesForSimulator, context.getResults());
- // the optimizer could possibly have changed the network working state: set the original one
- context.getNetwork().getStateManager().setWorkingState(stateIdStr);
- stateListener.onOptimizerResults(stateId,context);
- logger.info("{}: corrective control optimization terminated", stateId);
- status.put(OnlineTaskType.OPTIMIZER, OnlineTaskStatus.SUCCESS);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- }
- if ( contingenciesForSimulator.size() > 0 ) {
- // perform time-domain simulation
- currentStatus=OnlineTaskType.TIME_DOMAIN_SIM;
- status.put(currentStatus, OnlineTaskStatus.RUNNING);
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- logger.info("{}: time-domain simulation started - working on {} contingencies", stateId, contingenciesForSimulator.size());
- logger.info("{}: stabilization started", stateId);
- StabilizationResult stabilizationResult = stabilization.run();
- logger.info("{}: stabilization terminated", stateId);
- if ( stabilizationResult.getMetrics() != null ) {
- logger.info("{}: stabilization metrics: {}", stateId, stabilizationResult.getMetrics());
- if ( !stabilizationResult.getMetrics().isEmpty() )
- onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.STABILIZATION, stabilizationResult.getMetrics());
- }
+ status.put(currentStatus, OnlineTaskStatus.SUCCESS);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ computeAndStorePostContingencyViolations(context.getNetwork(), context.getContingenciesToAnalyze());
+ if ( isStateSafe && !parameters.validation() ) {
+ // state is safe: stop analysis and destroy the state
+ logger.info("{}: is safe for every contingency: stopping analysis", stateId);
+ //context.getNetwork().getStateManager().removeState(stateIdStr); // the state is still needed
+ return null;
+ } else {
+ if ( contingenciesForOptimizer.size() > 0 ) {
+ // perform corrective control optimization
+ currentStatus=OnlineTaskType.OPTIMIZER;
+ status.put(currentStatus, OnlineTaskStatus.RUNNING);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ logger.info("{}: corrective control optimization started - working on {} contingencies", stateId, contingenciesForOptimizer.size());
+ runOptimizer(context.getNetwork(), contingenciesForOptimizer, contingenciesForSimulator, context.getResults());
+ // the optimizer could possibly have changed the network working state: set the original one
+ context.getNetwork().getStateManager().setWorkingState(stateIdStr);
+ stateListener.onOptimizerResults(stateId,context);
+ logger.info("{}: corrective control optimization terminated", stateId);
+ status.put(OnlineTaskType.OPTIMIZER, OnlineTaskStatus.SUCCESS);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ }
+ if ( contingenciesForSimulator.size() > 0 ) {
+ // perform time-domain simulation
+ currentStatus=OnlineTaskType.TIME_DOMAIN_SIM;
+ status.put(currentStatus, OnlineTaskStatus.RUNNING);
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ logger.info("{}: time-domain simulation started - working on {} contingencies", stateId, contingenciesForSimulator.size());
+ logger.info("{}: stabilization started", stateId);
+ StabilizationResult stabilizationResult = stabilization.run();
+ logger.info("{}: stabilization terminated", stateId);
+ if ( stabilizationResult.getMetrics() != null ) {
+ logger.info("{}: stabilization metrics: {}", stateId, stabilizationResult.getMetrics());
+ if ( !stabilizationResult.getMetrics().isEmpty() )
+ onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.STABILIZATION, stabilizationResult.getMetrics());
+ }
if (stabilizationResult.getStatus() == StabilizationStatus.COMPLETED) {
ImpactAnalysisResult impactAnalysisResult = impactAnalysis.run(stabilizationResult.getState(), OnlineUtils.getContingencyIds(contingenciesForSimulator));
logger.info("{}: impact analysis terminated", stateId);
- if ( impactAnalysisResult.getMetrics() != null ) {
- logger.info("{}: impact analysis metrics: {}", stateId, impactAnalysisResult.getMetrics());
- if ( !impactAnalysisResult.getMetrics().isEmpty() )
- onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.IMPACT_ANALYSIS, impactAnalysisResult.getMetrics());
- }
+ if ( impactAnalysisResult.getMetrics() != null ) {
+ logger.info("{}: impact analysis metrics: {}", stateId, impactAnalysisResult.getMetrics());
+ if ( !impactAnalysisResult.getMetrics().isEmpty() )
+ onlineDb.storeMetrics(context.getWorkflowId(), stateId, OnlineStep.IMPACT_ANALYSIS, impactAnalysisResult.getMetrics());
+ }
putResultsIntoContext(stateId, impactAnalysisResult, context.getResults());
stateListener.onImpactAnalysisResults(stateId, context);
logger.info("{}: time-domain simulation terminated", stateId);
@@ -257,220 +282,220 @@ public Void call() throws Exception {
status.put(OnlineTaskType.TIME_DOMAIN_SIM, OnlineTaskStatus.FAILED);
stateListener.onUpdate(stateId, status,context.timeHorizon, "time-domain simulation failed (stabilization): metrics = "+stabilizationResult.getMetrics());
}
- }
- }
- stateListener.onUpdate(stateId, status,context.timeHorizon);
- } else {
- logger.error("{}: stop analisys of state: loadflow does not converge: metrics = {}", stateIdStr, result.getMetrics());
- stateListener.onUpdate(stateId, status,context.timeHorizon, "LoadFLow does not converge: metrics = " + result.getMetrics());
- }
- } catch (Throwable t) {
- status.put(currentStatus, OnlineTaskStatus.FAILED);
- //TODO manage string ifo detail
- stateListener.onUpdate(stateId, status,context.timeHorizon ,currentStatus +" failed ... ");
+ }
+ }
+ stateListener.onUpdate(stateId, status,context.timeHorizon);
+ } else {
+ logger.error("{}: stop analysis of state: loadflow does not converge: metrics = {}", stateIdStr, result.getMetrics());
+ stateListener.onUpdate(stateId, status,context.timeHorizon, "LoadFlow does not converge: metrics = " + result.getMetrics());
+ }
+ } catch (Throwable t) {
+ status.put(currentStatus, OnlineTaskStatus.FAILED);
+ //TODO manage string ifo detail
+ stateListener.onUpdate(stateId, status,context.timeHorizon ,currentStatus +" failed ... ");
logger.error("{}: Error working on state: {}", stateId, t.toString(), t);
}
- return null;
- }
-
- private void runOptimizer(Network network, List contingencies, List contingenciesForSimulator, ForecastAnalysisResults results) {
- String stateId = network.getStateManager().getWorkingStateId();
- logger.info("{}: running optimizer", stateId);
- List> postContingencyStateComputations = new ArrayList<>(contingencies.size());
- for (Contingency contingency : contingencies) {
- postContingencyStateComputations.add(
- new Callable() {
+ return null;
+ }
+
+ private void runOptimizer(Network network, List contingencies, List contingenciesForSimulator, ForecastAnalysisResults results) {
+ String stateId = network.getStateManager().getWorkingStateId();
+ logger.info("{}: running optimizer", stateId);
+ List> postContingencyStateComputations = new ArrayList<>(contingencies.size());
+ for (Contingency contingency : contingencies) {
+ postContingencyStateComputations.add(
+ new Callable() {
+
+ @Override
+ public Void call() throws Exception {
+ String postContingencyStateId = stateId + "-post-" + contingency.getId();
+ boolean loadflowConverge = computePostContingencyState(network, stateId, contingency, postContingencyStateId);
+ if ( loadflowConverge ) {
+ logger.info("{}: adding state {} to post contingency states for optimizer", stateId, postContingencyStateId);
+ PostContingencyState postContingencyState = new PostContingencyState(network, postContingencyStateId, contingency);
+ logger.info("{}: running optimizer on post contingency state {} of contingency {}", stateId, postContingencyStateId, contingency.getId());
+ CorrectiveControlOptimizerResult optimizerResult = null;
+ try {
+ optimizerResult = optimizer.run(postContingencyState);
+ } catch (Throwable t) {
+ logger.error("{}: Error running optimizer on contingency {}: {}", stateId, contingency.getId(), t.getMessage(), t);
+ optimizerResult = new CorrectiveControlOptimizerResult(contingency.getId(), false);
+ optimizerResult.setFinalStatus(CCOFinalStatus.OPTIMIZER_EXECUTION_ERROR);
+ optimizerResult.setCause(t.getMessage());
+ }
+ logger.info("{}: optimizer results for contingency {}: action found = {}, status = {}, cause = {}", stateId, contingency.getId(), optimizerResult.areActionsFound(), optimizerResult.getFinalStatus(), optimizerResult.getCause());
+ Map> actions = null;
+ if ( optimizerResult.areActionsFound() ) {
+ logger.info("{}: optimizer results: action plan {}, actions {} for contingency {}", stateId, optimizerResult.getActionPlan(), optimizerResult.getActionsIds(), contingency.getId());
+ actions = new HashMap>();
+ for(String actionId : optimizerResult.getActionsIds())
+ actions.put(actionId, optimizerResult.getEquipmentsWithParameters(actionId));
+ } else {
+ logger.error("{}: Error: optimizer didn't find actions for post contingency state {}", stateId, postContingencyStateId);
+ if ( !parameters.validation() ) { // if validation -> all the [contingency,state] pairs have already been added to the list for simulation -> no need to do it here
+ // add to contingencies for simulator
+ synchronized(contingenciesForSimulator) {
+ contingenciesForSimulator.add(contingency);
+ }
+ }
+ }
+ synchronized(results) {
+ results.addStateWithActions(contingency.getId(),
+ Integer.valueOf(stateId),
+ optimizerResult.areActionsFound(),
+ optimizerResult.getFinalStatus(),
+ optimizerResult.getCause(),
+ optimizerResult.getActionPlan(),
+ actions);
+ }
+ } else {
+ logger.info("{}: loadflow does not converge on post contingency state {}, the contingency {} will be analyzed by T-D simulation", stateId, postContingencyStateId, contingency.getId());
+ if ( !parameters.validation() ) { // if validation -> all the [contingency,state] pairs have already been added to the list for simulation -> no need to do it here
+ // add to contingencies for simulator
+ synchronized(contingenciesForSimulator) {
+ contingenciesForSimulator.add(contingency);
+ }
+ }
+ }
+
+ return null;
+ }
+
+ }
+ );
+ }
+ ExecutorService taskExecutor = Executors.newFixedThreadPool(contingencies.size());
+ try {
+ taskExecutor.invokeAll(postContingencyStateComputations);
+ } catch (InterruptedException e) {
+ logger.error("{}: Error running optimizer: {}", stateId, e.getMessage());
+ }
+ taskExecutor.shutdown();
+ network.getStateManager().setWorkingState(stateId);
+ }
- @Override
- public Void call() throws Exception {
- String postContingencyStateId = stateId + "-post-" + contingency.getId();
- boolean loadflowConverge = computePostContingencyState(network, stateId, contingency, postContingencyStateId);
- if ( loadflowConverge ) {
- logger.info("{}: adding state {} to post contingency states for optimizer", stateId, postContingencyStateId);
- PostContingencyState postContingencyState = new PostContingencyState(network, postContingencyStateId, contingency);
- logger.info("{}: running optimizer on post contingency state {} of contingency {}", stateId, postContingencyStateId, contingency.getId());
- CorrectiveControlOptimizerResult optimizerResult = null;
- try {
- optimizerResult = optimizer.run(postContingencyState);
- } catch (Throwable t) {
- logger.error("{}: Error running optimizer on contingency {}: {}", stateId, contingency.getId(), t.getMessage(), t);
- optimizerResult = new CorrectiveControlOptimizerResult(contingency.getId(), false);
- optimizerResult.setFinalStatus(CCOFinalStatus.OPTIMIZER_EXECUTION_ERROR);
- optimizerResult.setCause(t.getMessage());
- }
- logger.info("{}: optimizer results for contingency {}: action found = {}, status = {}, cause = {}", stateId, contingency.getId(), optimizerResult.areActionsFound(), optimizerResult.getFinalStatus(), optimizerResult.getCause());
- Map> actions = null;
- if ( optimizerResult.areActionsFound() ) {
- logger.info("{}: optimizer results: action plan {}, actions {} for contingency {}", stateId, optimizerResult.getActionPlan(), optimizerResult.getActionsIds(), contingency.getId());
- actions = new HashMap>();
- for(String actionId : optimizerResult.getActionsIds())
- actions.put(actionId, optimizerResult.getEquipmentsWithParameters(actionId));
- } else {
- logger.error("{}: Error: optimizer didn't find actions for post contingency state {}", stateId, postContingencyStateId);
- if ( !parameters.validation() ) { // if validation -> all the [contingency,state] pairs have already been added to the list for simulation -> no need to do it here
- // add to contingencies for simulator
- synchronized(contingenciesForSimulator) {
- contingenciesForSimulator.add(contingency);
- }
- }
- }
- synchronized(results) {
- results.addStateWithActions(contingency.getId(),
- Integer.valueOf(stateId),
- optimizerResult.areActionsFound(),
- optimizerResult.getFinalStatus(),
- optimizerResult.getCause(),
- optimizerResult.getActionPlan(),
- actions);
- }
- } else {
- logger.info("{}: loadflow does not converge on post contigency state {}, the contingency {} will be analyzed by T-D simulation", stateId, postContingencyStateId, contingency.getId());
- if ( !parameters.validation() ) { // if validation -> all the [contingency,state] pairs have already been added to the list for simulation -> no need to do it here
- // add to contingencies for simulator
- synchronized(contingenciesForSimulator) {
- contingenciesForSimulator.add(contingency);
- }
- }
- }
-
- return null;
- }
-
- }
- );
- }
- ExecutorService taskExecutor = Executors.newFixedThreadPool(contingencies.size());
- try {
- taskExecutor.invokeAll(postContingencyStateComputations);
- } catch (InterruptedException e) {
- logger.error("{}: Error running optimizer: {}", stateId, e.getMessage());
- }
- taskExecutor.shutdown();
- network.getStateManager().setWorkingState(stateId);
- }
-
- private void computeAndStorePostContingencyViolations(Network network, List contingencies) {
- String stateId = network.getStateManager().getWorkingStateId();
- logger.info("{}: computing post contingency violations", stateId);
- List> postContingencyViolationsComputations = new ArrayList<>(contingencies.size());
- for (Contingency contingency : contingencies) {
- postContingencyViolationsComputations.add(
- new Callable() {
+ private void computeAndStorePostContingencyViolations(Network network, List contingencies) {
+ String stateId = network.getStateManager().getWorkingStateId();
+ logger.info("{}: computing post contingency violations", stateId);
+ List> postContingencyViolationsComputations = new ArrayList<>(contingencies.size());
+ for (Contingency contingency : contingencies) {
+ postContingencyViolationsComputations.add(
+ new Callable() {
- @Override
- public Void call() throws Exception {
- List violations = new ArrayList();
- // compute post contingency state
- String postContingencyStateId = stateId + "-post-" + contingency.getId();
- boolean loadflowConverge = computePostContingencyState(network, stateId, contingency, postContingencyStateId);
- if ( loadflowConverge ) {
- logger.info("{}: computing post contingency violations for contingency {}", stateId, contingency.getId());
- violations = Security.checkLimits(network, CurrentLimitType.PATL, Integer.MAX_VALUE, parameters.getLimitReduction());
- if ( violations == null || violations.isEmpty() ) {
- logger.info("{}: no post contingency violations for state {} and contingency {}", stateId, contingency.getId());
- violations = new ArrayList();
- }
- } else {
- logger.info("{}: post contingency loadflow does not converge for contingency {}, skipping computing post contingency violations", stateId, contingency.getId());
- }
- logger.info("{}: storing post contingency violations for state {} and contingency {} in online db", stateId, contingency.getId());
- onlineDb.storePostContingencyViolations(context.getWorkflowId(), Integer.valueOf(stateId), contingency.getId(), loadflowConverge, violations);
- network.getStateManager().setWorkingState(stateId);
-// network.getStateManager().removeState(postContingencyStateId);
- return null;
- }
- }
- );
- }
- ExecutorService taskExecutor = Executors.newFixedThreadPool(contingencies.size());
- try {
- taskExecutor.invokeAll(postContingencyViolationsComputations);
- } catch (InterruptedException e) {
- logger.error("{}: Error computing post contingency vioations: {}", stateId, e.getMessage());
- }
- taskExecutor.shutdown();
- }
-
- private boolean computePostContingencyState(Network network, String stateId, Contingency contingency, String postContingencyStateId) {
- boolean loadflowConverge = false;
- logger.info("{}: computing post contingency state for contingency {}", stateId, contingency.getId());
- //String postContingencyStateId = stateId + "-post-" + contingency.getId();
- boolean alreadyProcessed = false;
- synchronized (loadflowResults) {
- if ( loadflowResults.containsKey(postContingencyStateId) ) {
- alreadyProcessed = true;
- loadflowConverge = loadflowResults.get(postContingencyStateId);
- }
- }
- if ( alreadyProcessed && network.getStateManager().getStateIds().contains(postContingencyStateId) ) {
- // post contingency state already computed, avoid to run the load flow again
- logger.info("{}: post contingency state {} already computed", stateId, postContingencyStateId);
- network.getStateManager().setWorkingState(postContingencyStateId);
- } else {
- // create post contingency state
- logger.info("{}: creating post contingency state {}", stateId, postContingencyStateId);
- network.getStateManager().cloneState(stateId, postContingencyStateId);
- network.getStateManager().setWorkingState(postContingencyStateId);
- // apply contingency to post contingency state
- logger.info("{}: applying contingency {} to post contingency state {}", stateId, contingency.getId(), postContingencyStateId);
- contingency.toTask().modify(network);
- try {
- // run load flow on post contingency state
- logger.info("{}: running load flow on post contingency state {}", stateId, postContingencyStateId);
- LoadFlowResult result = loadFlow.run();
- if ( result.isOk() ) {
- logger.info("{}: load flow on post contingency state {} converge", stateId, postContingencyStateId);
- loadflowConverge = true;
- } else {
- logger.info("{}: load flow on post contingency state {} does not converge", stateId, postContingencyStateId);
- loadflowConverge = false;
- }
- synchronized (loadflowResults) {
- loadflowResults.put(postContingencyStateId, loadflowConverge);
- }
- } catch (Exception e) {
- logger.info("{}: error running load flow on post contingency state {}: {}", stateId, postContingencyStateId, e.getMessage());
- loadflowConverge = false;
- }
- }
- //network.getStateManager().setWorkingState(stateId);
- return loadflowConverge;
- }
+ @Override
+ public Void call() throws Exception {
+ List violations = new ArrayList();
+ // compute post contingency state
+ String postContingencyStateId = stateId + "-post-" + contingency.getId();
+ boolean loadflowConverge = computePostContingencyState(network, stateId, contingency, postContingencyStateId);
+ if ( loadflowConverge ) {
+ logger.info("{}: computing post contingency violations for contingency {}", stateId, contingency.getId());
+ violations = Security.checkLimits(network, CurrentLimitType.PATL, Integer.MAX_VALUE, parameters.getLimitReduction());
+ if ( violations == null || violations.isEmpty() ) {
+ logger.info("{}: no post contingency violations for contingency {}", stateId, contingency.getId());
+ violations = new ArrayList();
+ }
+ } else {
+ logger.info("{}: post contingency loadflow does not converge for contingency {}, skipping computing post contingency violations", stateId, contingency.getId());
+ }
+ logger.info("{}: storing post contingency violations/loadflow results for contingency {} in online db", stateId, contingency.getId());
+ onlineDb.storePostContingencyViolations(context.getWorkflowId(), Integer.valueOf(stateId), contingency.getId(), loadflowConverge, violations);
+ network.getStateManager().setWorkingState(stateId);
+ // network.getStateManager().removeState(postContingencyStateId);
+ return null;
+ }
+ }
+ );
+ }
+ ExecutorService taskExecutor = Executors.newFixedThreadPool(contingencies.size());
+ try {
+ taskExecutor.invokeAll(postContingencyViolationsComputations);
+ } catch (InterruptedException e) {
+ logger.error("{}: Error computing post contingency violations: {}", stateId, e.getMessage());
+ }
+ taskExecutor.shutdown();
+ }
- private void putResultsIntoContext(Integer stateId, ImpactAnalysisResult simulationResult, ForecastAnalysisResults results) {
- Objects.requireNonNull(stateId, "state id is null");
- Objects.requireNonNull(simulationResult, "simulation result is null");
- Objects.requireNonNull(results, "forecast analysis result is null");
- List securityIndexesList = new ArrayList();
- if ( parameters.getSecurityIndexes() == null )
- securityIndexesList = simulationResult.getSecurityIndexes();
- else {
- securityIndexesList = simulationResult.getSecurityIndexes().stream().filter(x -> parameters.getSecurityIndexes().contains(x.getId().getSecurityIndexType())).collect(Collectors.toList());
- if (securityIndexesList.isEmpty()) {
- logger.info("Empty filter security indexes -> using all the indexes");
- securityIndexesList = simulationResult.getSecurityIndexes();
- }
- }
+ private boolean computePostContingencyState(Network network, String stateId, Contingency contingency, String postContingencyStateId) {
+ boolean loadflowConverge = false;
+ logger.info("{}: computing post contingency state for contingency {}", stateId, contingency.getId());
+ //String postContingencyStateId = stateId + "-post-" + contingency.getId();
+ boolean alreadyProcessed = false;
+ synchronized (loadflowResults) {
+ if ( loadflowResults.containsKey(postContingencyStateId) ) {
+ alreadyProcessed = true;
+ loadflowConverge = loadflowResults.get(postContingencyStateId);
+ }
+ }
+ if ( alreadyProcessed && network.getStateManager().getStateIds().contains(postContingencyStateId) ) {
+ // post contingency state already computed, avoid to run the load flow again
+ logger.info("{}: post contingency state {} already computed", stateId, postContingencyStateId);
+ network.getStateManager().setWorkingState(postContingencyStateId);
+ } else {
+ // create post contingency state
+ logger.info("{}: creating post contingency state {}", stateId, postContingencyStateId);
+ network.getStateManager().cloneState(stateId, postContingencyStateId);
+ network.getStateManager().setWorkingState(postContingencyStateId);
+ // apply contingency to post contingency state
+ logger.info("{}: applying contingency {} to post contingency state {}", stateId, contingency.getId(), postContingencyStateId);
+ contingency.toTask().modify(network);
+ try {
+ // run load flow on post contingency state
+ logger.info("{}: running load flow on post contingency state {}", stateId, postContingencyStateId);
+ LoadFlowResult result = loadFlow.run();
+ if ( result.isOk() ) {
+ logger.info("{}: load flow on post contingency state {} converge", stateId, postContingencyStateId);
+ loadflowConverge = true;
+ } else {
+ logger.info("{}: load flow on post contingency state {} does not converge", stateId, postContingencyStateId);
+ loadflowConverge = false;
+ }
+ synchronized (loadflowResults) {
+ loadflowResults.put(postContingencyStateId, loadflowConverge);
+ }
+ } catch (Exception e) {
+ logger.info("{}: error running load flow on post contingency state {}: {}", stateId, postContingencyStateId, e.getMessage());
+ loadflowConverge = false;
+ }
+ }
+ //network.getStateManager().setWorkingState(stateId);
+ return loadflowConverge;
+ }
+
+ private void putResultsIntoContext(Integer stateId, ImpactAnalysisResult simulationResult, ForecastAnalysisResults results) {
+ Objects.requireNonNull(stateId, "state id is null");
+ Objects.requireNonNull(simulationResult, "simulation result is null");
+ Objects.requireNonNull(results, "forecast analysis result is null");
+ List securityIndexesList = new ArrayList();
+ if ( parameters.getSecurityIndexes() == null )
+ securityIndexesList = simulationResult.getSecurityIndexes();
+ else {
+ securityIndexesList = simulationResult.getSecurityIndexes().stream().filter(x -> parameters.getSecurityIndexes().contains(x.getId().getSecurityIndexType())).collect(Collectors.toList());
+ if (securityIndexesList.isEmpty()) {
+ logger.info("Empty filter security indexes -> using all the indexes");
+ securityIndexesList = simulationResult.getSecurityIndexes();
+ }
+ }
//Multimap securityIndexes = Multimaps.index(simulationResult.getSecurityIndexes(), new Function() {
- Multimap securityIndexes = Multimaps.index(securityIndexesList, new Function() {
+ Multimap securityIndexes = Multimaps.index(securityIndexesList, new Function() {
@Override
public String apply(SecurityIndex index){
return index.getId().getContingencyId();
}
});
- synchronized (results) {
- for (Map.Entry> entry : securityIndexes.asMap().entrySet()) {
- boolean isSafe = OnlineUtils.isSafe(entry.getValue());
- if ( !isSafe ) {
- logger.info("{}: unsafe for contingency {} afer time domain simulation", stateId, entry.getKey());
- results.addUnsafeStateWithIndexes(entry.getKey(), stateId, new ArrayList<>(entry.getValue()));
- } else {
- logger.info("{}: safe for contingency {} afer time domain simulation", stateId, entry.getKey());
- if ( parameters.validation() ) // if validation add anyway to results
- results.addUnsafeStateWithIndexes(entry.getKey(), stateId, new ArrayList<>(entry.getValue()));
- }
- }
- }
- }
+ synchronized (results) {
+ for (Map.Entry> entry : securityIndexes.asMap().entrySet()) {
+ boolean isSafe = OnlineUtils.isSafe(entry.getValue());
+ if ( !isSafe ) {
+ logger.info("{}: unsafe for contingency {} afer time domain simulation", stateId, entry.getKey());
+ results.addUnsafeStateWithIndexes(entry.getKey(), stateId, new ArrayList<>(entry.getValue()));
+ } else {
+ logger.info("{}: safe for contingency {} afer time domain simulation", stateId, entry.getKey());
+ if ( parameters.validation() ) // if validation add anyway to results
+ results.addUnsafeStateWithIndexes(entry.getKey(), stateId, new ArrayList<>(entry.getValue()));
+ }
+ }
+ }
+ }
}
diff --git a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java
index 165c1e26..dbbe5b98 100644
--- a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java
+++ b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java
@@ -1,5 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
+ * Copyright (c) 2016, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -78,1743 +79,1796 @@
*/
public class OnlineDbMVStore implements OnlineDb {
- private static final String STORED_WORKFLOW_PREFIX = "wf-";
- private static final String STORED_METRICS_STEPS_MAP_NAME = "storedSteps";
- private static final String STORED_METRICS_STATES_MAP_SUFFIX = "_states";
- private static final String STORED_METRICS_PARAMS_MAP_SUFFIX = "_params";
- private static final String STORED_RESULTS_MAP_NAME = "wfResults";
- private static final String STORED_RESULTS_ACTIONS_MAP_SUFFIX = "_actions";
- private static final String STORED_RESULTS_ACTIONINFO_MAP_SUFFIX = "_actionplans"; // i do not change this, for backward compatibility
- private static final String STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX = "_actionsfound";
- private static final String STORED_RESULTS_ACTIONINFO_STATUS_KEY_SUFFIX = "_status";
- private static final String STORED_RESULTS_ACTIONINFO_CAUSE_KEY_SUFFIX = "_cause";
- private static final String STORED_RESULTS_ACTIONINFO_ACTIONPLAN_KEY_SUFFIX = "_actionplan";
- private static final String STORED_RESULTS_ACTIONS_EQUIPMENTS_MAP_SUFFIX = "_actionequipments";
- private static final String STORED_RESULTS_ACTIONS_PARAMETERS_MAP_SUFFIX = "_actionparameters";
- private static final String STORED_RESULTS_INDEXES_MAP_SUFFIX = "_indexes";
- private static final String STORED_RESULTS_TIMEHORIZON_KEY = "time_orizon";
- private static final String STORED_RESULTS_CONTINGENCIES_WITH_ACTIONS_KEY = "contingiencies_with_actions";
- private static final String STORED_RESULTS_UNSAFE_CONTINGENCIES_KEY = "unsafe_contingiencies";
- private static final String STORED_RULES_RESULTS_MAP_NAME = "wfRulesResults";
- private static final String STORED_RULES_RESULTS_STATE_RESULTS_MAP_SUFFIX = "_rulesresults";
- private static final String STORED_RULES_RESULTS_STATE_STATUS_MAP_SUFFIX = "_rulesstatus";
- private static final String STORED_RULES_RESULTS_CONTINGENCIES_WITH_RULES_KEY = "contingiencies_with_rules";
- private static final String STORED_WCA_RESULTS_MAP_NAME = "wfWcaResults";
- private static final String STORED_WCA_RESULTS_CLUSTERS_MAP_NAME = "contingencies_wcaclusters";
- private static final String STORED_WCA_RESULTS_CAUSES_MAP_NAME = "contingencies_wcacause";
- private static final String STORED_PARAMETERS_MAP_NAME = "wfParameters";
- private static final String STORED_PARAMETERS_BASECASE_KEY = "basecase";
- private static final String STORED_PARAMETERS_STATE_NUMBER_KEY = "state_number";
- private static final String STORED_PARAMETERS_HISTO_INTERVAL_KEY = "histo_interval";
- private static final String STORED_PARAMETERS_OFFLINE_WF_ID_KEY = "offline_wf";
- private static final String STORED_PARAMETERS_FEA_ID_KEY = "fe_analysis";
- private static final String STORED_PARAMETERS_RULES_PURITY_KEY = "rules_purity";
- private static final String STORED_PARAMETERS_STORE_STATES_KEY = "store_states";
- private static final String STORED_PARAMETERS_ANALYSE_BASECASE_KEY = "analyse_basecase";
- private static final String STORED_PARAMETERS_VALIDATION_KEY = "validation";
- private static final String STORED_PARAMETERS_SECURITY_INDEXES_KEY = "security_indexes";
- private static final String STORED_PARAMETERS_CASE_TYPE_KEY = "case_type";
- private static final String STORED_PARAMETERS_COUNTRIES_KEY = "countries";
- private static final String STORED_PARAMETERS_MERGE_OPTIMIZED_KEY = "merge_optimized";
- private static final String STORED_PARAMETERS_LIMIT_REDUCTION_KEY = "limit_reduction";
- private static final String STORED_STATES_PROCESSING_STATUS_MAP_NAME = "statesProcessingStatus";
- private static final String STORED_STATES_LIST_KEY = "states";
- private static final String STORED_STATES_STATE_DETAILS_KEY = "stateStatusDetails";
- private static final String STORED_STATE_PROCESSING_STATUS_MAP_SUFFIX = "_processingstatus";
- private static final String STORED_WORKFLOW_STATES_FOLDER_PREFIX = "states-wf-";
- private static final String STORED_STATE_PREFIX = "state-";
- private static final String STORED_VIOLATIONS_STEPS_MAP_NAME = "storedViolationsSteps";
- private static final String STORED_VIOLATIONS_STATES_MAP_SUFFIX = "_violationsstates";
- private static final String STORED_VIOLATIONS_STATES_MAP_NAME = "storedViolationsStates";
- private static final String STORED_VIOLATIONS_STEPS_MAP_SUFFIX = "_violationssteps";
- private static final String STORED_VIOLATIONS_MAP_PREFIX = "violations_";
- private static final String STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_NAME = "storedPCViolationsContingencies";
- private static final String STORED_PC_VIOLATIONS_STATES_MAP_SUFFIX = "_pcviolationsstates";
- private static final String STORED_PC_VIOLATIONS_STATES_MAP_NAME = "storedPCViolationsStates";
- private static final String STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX = "_pcviolationscontigencies";
- private static final String STORED_PC_VIOLATIONS_MAP_PREFIX = "pcviolations_";
- private static final String STORED_PC_LOADFLOW_CONTINGENCIES_MAP_NAME = "storedPCLoadflowContingencies";
- private static final String STORED_PC_LOADFLOW_STATES_MAP_SUFFIX = "_pcloadflowstates";;
- private static final String STORED_PC_LOADFLOW_STATES_MAP_NAME = "storedPCLoadflowStates";
- private static final String STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX = "_pcloadflowcontigencies";
- private static final String STORED_WCA_RULES_RESULTS_MAP_NAME = "wfWcaRulesResults";
- private static final String STORED_WCA_RULES_RESULTS_STATE_RESULTS_MAP_SUFFIX = "_wcarulesresults";
- private static final String STORED_WCA_RULES_RESULTS_STATE_STATUS_MAP_SUFFIX = "_wcarulesstatus";
- private static final String SERIALIZED_STATES_FILENAME = "network-states.csv";
-
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OnlineDbMVStore.class);
-
-
- private OnlineDbMVStoreConfig config = null;
-
- HashMap storedWFMetrics = new HashMap();
- ConcurrentHashMap>> workflowsStates = new ConcurrentHashMap>>();
-
- MVMapConcurrent.Builder mapBuilder;
-
-
- public OnlineDbMVStore(OnlineDbMVStoreConfig config) {
- this.config = config;
- LOGGER.info(config.toString());
- Path storageFolder = config.getOnlineDbDir();
- if ( !Files.exists(storageFolder) ) {
- try {
- Files.createDirectories(storageFolder);
- } catch (IOException e) {
- String errorMessage = "online db folder " + storageFolder + " does not exist and cannot be created: " + e.getMessage();
- LOGGER.error(errorMessage);
- throw new RuntimeException(errorMessage);
- }
- }
- mapBuilder = new MVMapConcurrent.Builder();
- }
-
- public OnlineDbMVStore() {
- this(OnlineDbMVStoreConfig.load());
- }
-
- private synchronized void closeStores() {
- ArrayList workflowIds = new ArrayList();
- for (String storedWorkflowId : storedWFMetrics.keySet()) {
- MVStore wfMVStore = storedWFMetrics.get(storedWorkflowId);
- wfMVStore.close();
- workflowIds.add(storedWorkflowId);
- }
- for (String workflowId : workflowIds) {
- storedWFMetrics.remove(workflowId);
- }
- }
-
- private synchronized MVStore getStore(String workflowId) {
- MVStore wfMVStore;
- if ( storedWFMetrics.containsKey(workflowId))
- wfMVStore = storedWFMetrics.get(workflowId);
- else {
- LOGGER.debug("Opening file for workflow {}", workflowId);
- wfMVStore = MVStore.open(config.getOnlineDbDir().toString() + File.separator + STORED_WORKFLOW_PREFIX + workflowId);
- storedWFMetrics.put(workflowId, wfMVStore);
- }
- return wfMVStore;
- }
-
- @Override
- public void storeMetrics(String workflowId, OnlineStep step, Map metrics) {
- LOGGER.info("Storing metrics for wf {} and step {}", workflowId, step.name());
- storeMetrics(workflowId, step.name() + "__", metrics);
- LOGGER.info("Storing metadata for wf {} and step {}", workflowId, step.name());
- storeStepMetadata(workflowId, "_", step, metrics);
-
- }
-
- @Override
- public void storeMetrics(String workflowId, Integer stateId, OnlineStep step, Map metrics) {
- String stateIdStr=String.valueOf(stateId);
- LOGGER.info("Storing metrics for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr);
- storeMetrics(workflowId, step.name() + "_" + stateIdStr, metrics);
- LOGGER.info("Storing metadata for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr);
- storeStepMetadata(workflowId, stateIdStr, step, metrics);
- }
-
- private void storeMetrics(String workflowId, String mapName, Map metrics) {
- try {
- MVStore wfMVStore = getStore(workflowId);
- Map metricsMap = wfMVStore.openMap(mapName, mapBuilder);
- for ( String parameter : metrics.keySet() ) {
- metricsMap.put(parameter, metrics.get(parameter));
- }
- wfMVStore.commit();
- } catch(Throwable e) {
- String errorMessage = "Error storing metrics for wf " + workflowId + " in map " + mapName + ": " + e.getMessage();
- LOGGER.error(errorMessage);
- throw new RuntimeException(errorMessage);
- }
- }
-
- private void storeStepMetadata(String workflowId, String stateId, OnlineStep step, Map metrics) {
- try {
- MVStore wfMVStore = getStore(workflowId);
- // save info about stored wf steps
- MVMap storedStepsMap = wfMVStore.openMap(STORED_METRICS_STEPS_MAP_NAME, mapBuilder);
- storedStepsMap.putIfAbsent(step.name(), "1");
- // save info about stored states per step
- MVMap stepStatesMap = wfMVStore.openMap(step.name() + STORED_METRICS_STATES_MAP_SUFFIX, mapBuilder);
- stepStatesMap.putIfAbsent(stateId, "");
- // save info about stored params per step
- MVMap stepParamsMap = wfMVStore.openMap(step.name() + STORED_METRICS_PARAMS_MAP_SUFFIX, mapBuilder);
- for ( String parameter : metrics.keySet() ) {
- stepParamsMap.putIfAbsent(parameter, "");
- }
- wfMVStore.commit();
- } catch(Throwable e) {
- String errorMessage = "Error storing metadata for wf " + workflowId + ", step "+ step.name() + ", state " + stateId + ": " + e.getMessage();
- LOGGER.error(errorMessage);
- throw new RuntimeException(errorMessage);
- }
- }
-
- @Override
- public Map getMetrics(String workflowId, OnlineStep step) {
- LOGGER.info("Getting metrics from wf {} and step {}", workflowId, step.name());
- return getMetrics(workflowId, step.name() + "__");
- }
-
- @Override
- public Map getMetrics(String workflowId, Integer stateId, OnlineStep step) {
- String stateIdStr=String.valueOf(stateId);
- LOGGER.info("Getting metrics from wf {}, step {} and state {}", workflowId, step.name(), stateIdStr);
- return getMetrics(workflowId, step.name() + "_" + stateIdStr);
- }
-
- private Map getMetrics(String workflowId, String mapName) {
- if ( isWorkflowStored(workflowId) ) {
- HashMap metrics = new HashMap();
- MVStore wfMVStore = getStore(workflowId);
- if ( wfMVStore.getMapNames().contains(mapName) ) {
- Map storedMap = wfMVStore.openMap(mapName, mapBuilder);
- for ( String parameter : storedMap.keySet() ) {
- metrics.put(parameter, storedMap.get(parameter));
- }
- }
- return metrics;
- } else {
- LOGGER.warn("No data about wf {}", workflowId);
- return null;
- }
- }
-
- @Override
- public String getCsvMetrics(String workflowId, OnlineStep step) {
- LOGGER.info("Preparing CSV data for wf {} and step {}", workflowId, step.name());
- if ( isWorkflowStored(workflowId) ) {
- StringWriter content = new StringWriter();
- CsvWriter cvsWriter = new CsvWriter(content, ',');
- try {
- MVStore wfMVStore = getStore(workflowId);
- // check if there are stored metrics
- Map storedStepsMap = wfMVStore.openMap(STORED_METRICS_STEPS_MAP_NAME, mapBuilder);
- if ( storedStepsMap.containsKey(step.name()) ) {
- MVMap stepParamsMap = wfMVStore.openMap(step.name() + STORED_METRICS_PARAMS_MAP_SUFFIX, mapBuilder);
- MVMap stepStatesMap = wfMVStore.openMap(step.name() + STORED_METRICS_STATES_MAP_SUFFIX, mapBuilder);
- // write headers
- //LOGGER.debug("Preparing CSV headers for wf {} and step {}", workflowId, step.name());
- String[] headers = new String[stepParamsMap.keySet().size()+1];
- headers[0] = "state";
- HashMap paramsIndexes = new HashMap<>();
- int i = 1;
- for ( String parameter : stepParamsMap.keySet() ) {
- headers[i] = parameter;
- paramsIndexes.put(parameter, i);
- i++;
- }
- cvsWriter.writeRecord(headers);
- // write step general metrics, if stored
- if ( stepStatesMap.containsKey("_") ) {
- //LOGGER.debug("Preparing CSV data for wf {} and step {} - general step metrics", workflowId, step.name());
- String[] values = getStoredMapValues(wfMVStore, "_", step, stepParamsMap.keySet().size(), paramsIndexes);
- cvsWriter.writeRecord(values);
- }
- // write step metrics for each state, if stored
- for ( String stateId : stepStatesMap.keySet() ) {
- if (!"_".equals(stateId) ) {
- //LOGGER.debug("Preparing CSV data for wf {} and step {} - state {} metrics", workflowId, step.name(), stateId);
- String[] values = getStoredMapValues(wfMVStore, stateId, step, stepParamsMap.keySet().size(), paramsIndexes);
- cvsWriter.writeRecord(values);
- }
- }
- }
- } catch (IOException e) {
- String errorMessage = "error getting cvs data for step " + step.name() + " and wf id " + workflowId;
- LOGGER.error(errorMessage);
- throw new RuntimeException(errorMessage);
- }
- cvsWriter.flush();
- return content.toString();
- } else {
- LOGGER.warn("No data about wf {}", workflowId);
- return null;
- }
- }
-
- private String[] getStoredMapValues(MVStore wfMVStore, String stateId, OnlineStep step, int paramsN, HashMap paramsIndexes) {
- String[] values = new String[paramsN+1];
- values[0] = stateId;
- Map storedMap = wfMVStore.openMap(step.name() + "_" + stateId, mapBuilder);
- for ( String parameter : storedMap.keySet() ) {
- int index = paramsIndexes.get(parameter);
- values[index] = storedMap.get(parameter);
- }
- return values;
- }
-
- @Override
- public List listWorkflows() {
- LOGGER.info("Getting list of stored workflows");
- List workflowIds = new ArrayList();
- File[] files = config.getOnlineDbDir().toFile().listFiles(new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return name.toLowerCase().startsWith(STORED_WORKFLOW_PREFIX);
- }
- });
- Arrays.sort(files, new Comparator(){
- public int compare(File f1, File f2)
- {
- return Long.valueOf(f1.lastModified()).compareTo(f2.lastModified());
- } });
- for (File file : files) {
- if ( file.isFile() ) {
- OnlineWorkflowDetails workflowDetails = new OnlineWorkflowDetails(file.getName().substring(STORED_WORKFLOW_PREFIX.length()));
- workflowDetails.setWorkflowDate(new DateTime(file.lastModified()));
- workflowIds.add(workflowDetails);
- }
- }
- LOGGER.info("Found {} workflow(s)", workflowIds.size());
- return workflowIds;
- }
-
- @Override
- public List