Merge pull request #234 from itesla/fea_histodb_client
Use histodb client instead of REST API to access historical data duri…
mathbagu committed Apr 10, 2017
2 parents b44fe12 + e4768c1 commit d086e64
Showing 11 changed files with 273 additions and 470 deletions.
10 changes: 10 additions & 0 deletions mcla-integration/pom.xml
@@ -69,11 +69,21 @@
<version>${project.version}</version>
</dependency>

<dependency>
<groupId>com.google.jimfs</groupId>
<artifactId>jimfs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

</project>
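
The three new test-scoped dependencies (jimfs, junit, mockito-all) suggest unit tests that mock the HistoDbClient and write the downloaded CSV to an in-memory file system. The following is only an illustrative sketch, not code from this commit: it exercises the static FEAHistoDBFacade.historicalDataToCsvFile helper introduced in the next file of this diff, and the test class name, equipment ids, interval and CSV content are made-up placeholders.

import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;

import org.joda.time.Interval;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mockito;

import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;

import eu.itesla_project.mcla.forecast_errors.FEAHistoDBFacade;
import eu.itesla_project.modules.histo.HistoDbAttributeId;
import eu.itesla_project.modules.histo.HistoDbClient;
import eu.itesla_project.modules.histo.HistoDbHorizon;
import eu.itesla_project.modules.histo.HistoQueryType;

public class FEAHistoDBFacadeSketchTest {

    @Test
    public void downloadsHistoricalDataToCsv() throws Exception {
        // Placeholder CSV content returned by the mocked HistoDb client.
        String csv = "datetime,horizon,forecastTime,GEN1_P,GEN1_Q,LOAD1_P,LOAD1_Q\n";
        HistoDbClient histoDbClient = Mockito.mock(HistoDbClient.class);
        Mockito.when(histoDbClient.queryCsv(Matchers.eq(HistoQueryType.forecastDiff),
                                            Matchers.anySetOf(HistoDbAttributeId.class),
                                            Matchers.any(Interval.class),
                                            Matchers.eq(HistoDbHorizon.DACF),
                                            Matchers.eq(false),
                                            Matchers.eq(false)))
               .thenReturn(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));

        // In-memory file system, so the test leaves nothing on disk.
        try (FileSystem fs = Jimfs.newFileSystem(Configuration.unix())) {
            Path csvFile = fs.getPath("/work/forecastsDiff.csv");
            Files.createDirectories(csvFile.getParent());
            Interval interval = Interval.parse("2016-01-01T00:00:00+01:00/2016-01-31T00:00:00+01:00");
            List<String> generatorsIds = Arrays.asList("GEN1"); // placeholder id
            List<String> loadsIds = Arrays.asList("LOAD1");     // placeholder id

            FEAHistoDBFacade.historicalDataToCsvFile(histoDbClient, generatorsIds, loadsIds, interval, csvFile);

            assertTrue(Files.exists(csvFile));
        }
    }
}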
@@ -1,91 +1,63 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
* Copyright (c) 2017, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package eu.itesla_project.mcla.forecast_errors;

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Objects;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.itesla_project.modules.online.TimeHorizon;
import eu.itesla_project.modules.histo.HistoDbAttr;
import eu.itesla_project.modules.histo.HistoDbAttributeId;
import eu.itesla_project.modules.histo.HistoDbClient;
import eu.itesla_project.modules.histo.HistoDbHorizon;
import eu.itesla_project.modules.histo.HistoDbMetaAttributeId;
import eu.itesla_project.modules.histo.HistoDbMetaAttributeType;
import eu.itesla_project.modules.histo.HistoDbNetworkAttributeId;
import eu.itesla_project.modules.histo.HistoQueryType;

/**
*
* @author Quinary <itesla@quinary.com>
*/
public class FEAHistoDBFacade {

private static final Logger LOGGER = LoggerFactory.getLogger(FEAHistoDBFacade.class);

private final int MAXRECORDSNUM = 100000;

DataMiningFacadeRestConfig config;
TimeHorizon timeHorizon;
Interval histoInterval;
ArrayList<String> generatorsIds;
ArrayList<String> loadsIds;
public final class FEAHistoDBFacade {

public FEAHistoDBFacade(DataMiningFacadeRestConfig config, TimeHorizon timeHorizon, Interval histoInterval,
ArrayList<String> generatorsIds, ArrayList<String> loadsIds) {
Objects.requireNonNull(config, "config is null");
Objects.requireNonNull(timeHorizon, "time horizon is null");
Objects.requireNonNull(histoInterval, "histo interval is null");
Objects.requireNonNull(generatorsIds, "generatorsIds is null");
Objects.requireNonNull(loadsIds, "loadsIds is null");

this.config = config;
this.timeHorizon = timeHorizon;
this.histoInterval = histoInterval;
this.generatorsIds = generatorsIds;
this.loadsIds = loadsIds;
}
private FEAHistoDBFacade() {
}

public void historicalDataToCsvFile(Path historicalDataCsvFile) throws Exception {
String query = historicalDataQuery();
LOGGER.info("Downloading data from HistoDB to file " + historicalDataCsvFile);
LOGGER.debug("HistoDB query = " + query);
HttpsClientHelper.remoteDataToFilePOST(
config.getRestServiceUrl(),
query,
config.getServiceUser(),
config.getServicePassword(),
historicalDataCsvFile.toString());
}

protected String historicalDataQuery() {
String query = "headers=true";
query += "&count=" + MAXRECORDSNUM;
DateTimeFormatter dateFormatter = ISODateTimeFormat.date();
DateTime intervalStart = histoInterval.getStart();
DateTime intervalEnd = histoInterval.getEnd();
query += "&time=[" + intervalStart.toString(dateFormatter) + "," + intervalEnd.toString(dateFormatter) + "]";
switch (timeHorizon) {
case DACF:
query += "&horizon=" + timeHorizon.getName();
break;
default:
throw new AssertionError();
}
if ( timeHorizon.getForecastTime() >= 0 )
query += "&forecast=" + timeHorizon.getForecastTime();
query += "&cols=datetime,horizon,forecastTime";
for ( String generatorId : generatorsIds ) {
query += "," + generatorId + "_P" + "," + generatorId + "_Q";
}
for ( String loadId : loadsIds ) {
query += "," + loadId + "_P" + "," + loadId + "_Q";
}
return query;
}
public static void historicalDataToCsvFile(HistoDbClient histoDbClient, List<String> generatorsIds, List<String> loadsIds,
Interval histoInterval, Path historicalDataCsvFile) throws Exception {
Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>();
attributeIds.add(new HistoDbMetaAttributeId(HistoDbMetaAttributeType.datetime));
attributeIds.add(new HistoDbMetaAttributeId(HistoDbMetaAttributeType.horizon));
attributeIds.add(new HistoDbMetaAttributeId(HistoDbMetaAttributeType.forecastTime));
generatorsIds.forEach( generatorId ->
{
attributeIds.add(new HistoDbNetworkAttributeId(generatorId, HistoDbAttr.P));
attributeIds.add(new HistoDbNetworkAttributeId(generatorId, HistoDbAttr.Q));
});
loadsIds.forEach( loadId ->
{
attributeIds.add(new HistoDbNetworkAttributeId(loadId, HistoDbAttr.P));
attributeIds.add(new HistoDbNetworkAttributeId(loadId, HistoDbAttr.Q));
});
try (InputStream is = histoDbClient.queryCsv(HistoQueryType.forecastDiff,
attributeIds,
histoInterval,
HistoDbHorizon.DACF,
false,
false)) {
Files.copy(is, historicalDataCsvFile);
}
}

}
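
For reference, a minimal caller of the new static helper might look like the sketch below. It is not part of the commit: the class name, equipment ids, interval and output path are placeholders, and the HistoDbClient is assumed to be supplied by the platform (the production call site is in ForecastErrorsAnalyzerImpl.run(), further down in this diff).

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

import org.joda.time.Interval;

import eu.itesla_project.mcla.forecast_errors.FEAHistoDBFacade;
import eu.itesla_project.modules.histo.HistoDbClient;

public class FEAHistoDBFacadeCallerSketch {

    // Downloads P/Q history for placeholder equipment ids into a local CSV file.
    public static void download(HistoDbClient histoDbClient) throws Exception {
        List<String> generatorsIds = Arrays.asList("GEN1", "GEN2"); // placeholder ids
        List<String> loadsIds = Arrays.asList("LOAD1");             // placeholder id
        Interval histoInterval = Interval.parse("2016-01-01T00:00:00+01:00/2016-02-01T00:00:00+01:00");
        Path csvFile = Paths.get("forecastsDiff.csv");
        FEAHistoDBFacade.historicalDataToCsvFile(histoDbClient, generatorsIds, loadsIds, histoInterval, csvFile);
    }
}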
@@ -26,9 +26,6 @@ public class ForecastErrorsAnalyzerConfig {

private Path binariesDir;
private Path runtimeHomeDir;
private String histoDBUser;
private String histoDBPassword;
private String histoDBServiceUrl;
private final boolean debug;
private final Integer rngSeed;
private final Integer checkModule0;
@@ -53,9 +50,6 @@
public ForecastErrorsAnalyzerConfig(
Path binariesDir,
Path runtimeHomeDir,
String histoDBServiceUrl,
String histoDBUser,
String histoDBPassword,
Integer checkModule0,
double percpuGaussLoad,
double percpuGaussRes,
@@ -79,15 +73,9 @@ public ForecastErrorsAnalyzerConfig(
) {
Objects.requireNonNull(binariesDir,"sampler compiled binaries directory is null");
Objects.requireNonNull(runtimeHomeDir,"matlab runtime directory is null");
Objects.requireNonNull(histoDBServiceUrl, "histodb service url is null");
Objects.requireNonNull(histoDBUser, "histodb user is null");
Objects.requireNonNull(histoDBPassword, "histodb password is null");

this.binariesDir=binariesDir;
this.runtimeHomeDir = runtimeHomeDir;
this.histoDBServiceUrl = histoDBServiceUrl;
this.histoDBUser = histoDBUser;
this.histoDBPassword = histoDBPassword;
this.rngSeed = rngSeed;
this.checkModule0=checkModule0;
this.percpuGaussLoad=percpuGaussLoad;
@@ -115,9 +103,6 @@ public static ForecastErrorsAnalyzerConfig load() {

Path binariesDir = config.getPathProperty("binariesDir");
Path runtimeHomeDir = config.getPathProperty("runtimeHomeDir");
String histoDBServiceUrl = config.getStringProperty("histoDBServiceUrl");
String histoDBUser = config.getStringProperty("histoDBUser");
String histoDBPassword = config.getStringProperty("histoDBPassword");
boolean debug = config.getBooleanProperty("debug", false);
Integer checkModule0 = config.getOptionalIntProperty("checkModule0");
double percpuGaussLoad = config.getDoubleProperty("percpuGaussLoad");
@@ -139,7 +124,7 @@ public static ForecastErrorsAnalyzerConfig load() {
double thresGUI = config.getDoubleProperty("thresGUI");
String nats = config.getStringProperty("nats","All");

return new ForecastErrorsAnalyzerConfig(binariesDir, runtimeHomeDir, histoDBServiceUrl, histoDBUser, histoDBPassword, checkModule0, percpuGaussLoad, percpuGaussRes, correlationGauss, tolVar, nMinObsFract, nMinObsInterv, imputationMeth, nGaussians, kOutlier, tolerance, iterations, epsilo, conditionalSampling, tFlags, histo_estremeQ, thresGUI, nats, rngSeed, debug);
return new ForecastErrorsAnalyzerConfig(binariesDir, runtimeHomeDir, checkModule0, percpuGaussLoad, percpuGaussRes, correlationGauss, tolVar, nMinObsFract, nMinObsInterv, imputationMeth, nGaussians, kOutlier, tolerance, iterations, epsilo, conditionalSampling, tFlags, histo_estremeQ, thresGUI, nats, rngSeed, debug);
}

public Path getBinariesDir() {
@@ -150,18 +135,6 @@ public Path getRuntimeHomeDir() {
return runtimeHomeDir;
}

public String getHistoDBUser() {
return histoDBUser;
}

public String getHistoDBPassword() {
return histoDBPassword;
}

public String getHistoDBServiceUrl() {
return histoDBServiceUrl;
}

public Integer getCheckModule0() {
return checkModule0;
}
@@ -231,7 +204,6 @@ public boolean isDebug() {
@Override
public String toString() {
return "ForecastErrorsAnalyzerConfig [binariesDir=" + binariesDir + ", runtimeHomeDir=" + runtimeHomeDir
+ ", histoDBUser=" + histoDBUser + ", histoDBPassword=*" + ", histoDBServiceUrl=" + histoDBServiceUrl
+ ", check module0=" + checkModule0
+ ", per cpu gauss load=" + percpuGaussLoad
+ ", per cpu gauss res=" + percpuGaussRes
@@ -1,5 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
* Copyright (c) 2017, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -8,6 +9,7 @@

import eu.itesla_project.computation.ComputationManager;
import eu.itesla_project.iidm.network.Network;
import eu.itesla_project.modules.histo.HistoDbClient;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzer;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzerFactory;
import eu.itesla_project.modules.mcla.ForecastErrorsDataStorage;
@@ -18,9 +20,10 @@
*/
public class ForecastErrorsAnalyzerFactoryImpl implements ForecastErrorsAnalyzerFactory {

@Override
public ForecastErrorsAnalyzer create(Network network, ComputationManager computationManager, ForecastErrorsDataStorage forecastErrorsDataStorage) {
return new ForecastErrorsAnalyzerImpl(network, computationManager, forecastErrorsDataStorage);
}
@Override
public ForecastErrorsAnalyzer create(Network network, ComputationManager computationManager,
ForecastErrorsDataStorage forecastErrorsDataStorage, HistoDbClient histoDbClient) {
return new ForecastErrorsAnalyzerImpl(network, computationManager, forecastErrorsDataStorage, histoDbClient);
}

}
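
With this signature change, callers must supply the HistoDbClient themselves. Below is a minimal wiring sketch, not part of the commit, using only interfaces imported in this diff; the class and method names are placeholders, and the factory instance is passed in rather than instantiated because the implementation's package declaration is not visible here.

import eu.itesla_project.computation.ComputationManager;
import eu.itesla_project.iidm.network.Network;
import eu.itesla_project.modules.histo.HistoDbClient;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzer;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzerFactory;
import eu.itesla_project.modules.mcla.ForecastErrorsDataStorage;

public class ForecastErrorsAnalyzerWiringSketch {

    // The HistoDb client is now injected through the factory instead of being
    // rebuilt internally from REST credentials held in the analyzer config.
    public static ForecastErrorsAnalyzer newAnalyzer(ForecastErrorsAnalyzerFactory factory,
                                                     Network network,
                                                     ComputationManager computationManager,
                                                     ForecastErrorsDataStorage storage,
                                                     HistoDbClient histoDbClient) {
        return factory.create(network, computationManager, storage, histoDbClient);
    }
}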
@@ -1,6 +1,6 @@
/**
* Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium)
* Copyright (c) 2016, RTE (http://www.rte-france.com)
* Copyright (c) 2016-2017, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -11,10 +11,12 @@
import eu.itesla_project.iidm.network.Network;
import eu.itesla_project.mcla.NetworkUtils;
import eu.itesla_project.mcla.forecast_errors.data.ForecastErrorsHistoricalData;
import eu.itesla_project.modules.histo.HistoDbClient;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzer;
import eu.itesla_project.modules.mcla.ForecastErrorsAnalyzerParameters;
import eu.itesla_project.modules.mcla.ForecastErrorsDataStorage;
import eu.itesla_project.modules.online.TimeHorizon;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -55,29 +57,29 @@ public class ForecastErrorsAnalyzerImpl implements ForecastErrorsAnalyzer {

private final ComputationManager computationManager;
private final ForecastErrorsDataStorage forecastErrorsDataStorage;
private final HistoDbClient histoDbClient;

private Network network;
private ForecastErrorsAnalyzerConfig config = null;
private ForecastErrorsAnalyzerParameters parameters;
private ArrayList<String> generatorsIds = new ArrayList<String>();
private ArrayList<String> loadsIds = new ArrayList<String>();

public ForecastErrorsAnalyzerImpl(Network network, ComputationManager computationManager, ForecastErrorsDataStorage forecastErrorsDataStorage,
ForecastErrorsAnalyzerConfig config) {
Objects.requireNonNull(network, "network is null");
Objects.requireNonNull(computationManager, "computation manager is null");
Objects.requireNonNull(forecastErrorsDataStorage, "forecast errors data storage is null");
Objects.requireNonNull(config, "config is null");
LOGGER.info(config.toString());
this.network = network;
this.computationManager = computationManager;
this.forecastErrorsDataStorage = forecastErrorsDataStorage;
this.config = config;
}

public ForecastErrorsAnalyzerImpl(Network network, ComputationManager computationManager, ForecastErrorsDataStorage forecastErrorsDataStorage) {
this(network, computationManager, forecastErrorsDataStorage, ForecastErrorsAnalyzerConfig.load());
}
public ForecastErrorsAnalyzerImpl(Network network, ComputationManager computationManager,
ForecastErrorsDataStorage forecastErrorsDataStorage,
HistoDbClient histoDbClient, ForecastErrorsAnalyzerConfig config) {
this.network = Objects.requireNonNull(network, "network is null");
this.computationManager = Objects.requireNonNull(computationManager, "computation manager is null");
this.forecastErrorsDataStorage = Objects.requireNonNull(forecastErrorsDataStorage, "forecast errors data storage is null");
this.histoDbClient = Objects.requireNonNull(histoDbClient, "HistoDb client is null");
this.config = Objects.requireNonNull(config, "config is null");
LOGGER.info(config.toString());
}

public ForecastErrorsAnalyzerImpl(Network network, ComputationManager computationManager,
ForecastErrorsDataStorage forecastErrorsDataStorage, HistoDbClient histoDbClient) {
this(network, computationManager, forecastErrorsDataStorage, histoDbClient, ForecastErrorsAnalyzerConfig.load());
}

@Override
public String getName() {
@@ -110,19 +112,12 @@ public void run(TimeHorizon timeHorizon) throws Exception {
final Path workingDir = executor.getWorkingDir();

// get forecast errors historical data from histodb
FEAHistoDBFacade histoDBFacade = new FEAHistoDBFacade(
new DataMiningFacadeRestConfig(
config.getHistoDBServiceUrl(),
config.getHistoDBUser(),
config.getHistoDBPassword(),
workingDir,
config.isDebug()),
timeHorizon,
parameters.getHistoInterval(),
generatorsIds,
loadsIds);
Path historicalDataCsvFile = Paths.get(workingDir.toString(), FEACSVFILENAME);
histoDBFacade.historicalDataToCsvFile(historicalDataCsvFile);
Path historicalDataCsvFile = workingDir.resolve(FEACSVFILENAME);
FEAHistoDBFacade.historicalDataToCsvFile(histoDbClient,
generatorsIds,
loadsIds,
parameters.getHistoInterval(),
historicalDataCsvFile);

//Path historicalDataCsvFile = Paths.get("/itesla_data/MAT", "forecastsDiff_7nodes.csv");
ForecastErrorsHistoricalData forecastErrorsHistoricalData = new HistoricalDataCreator(network, generatorsIds, loadsIds)
