Skip to content

Commit

Permalink
Enhance CSV loaders
Browse files Browse the repository at this point in the history
Add CharSource parser methods
Fixes #1267
  • Loading branch information
jodastephen committed Aug 5, 2016
1 parent c848543 commit 136c997
Show file tree
Hide file tree
Showing 10 changed files with 416 additions and 118 deletions.
Expand Up @@ -27,6 +27,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.io.CharSource;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.index.Index;
import com.opengamma.strata.collect.MapStream;
Expand All @@ -42,6 +43,7 @@
import com.opengamma.strata.market.curve.CurveGroupEntry;
import com.opengamma.strata.market.curve.CurveGroupName;
import com.opengamma.strata.market.curve.CurveName;
import com.opengamma.strata.market.curve.NodalCurveDefinition;

/**
* Loads a set of curve group definitions into memory by reading from CSV resources.
Expand Down Expand Up @@ -77,29 +79,58 @@ public final class CurveGroupDefinitionCsvLoader {
//-------------------------------------------------------------------------
/**
 * Loads the curve groups definition CSV file.
 * <p>
 * The list of {@link NodalCurveDefinition} will be empty in the resulting definition.
 *
 * @param groupsResource  the curve groups CSV resource
 * @return the list of definitions
 * @deprecated Use better named {@link #loadCurveGroupDefinitions(ResourceLocator)}
 */
@Deprecated
public static List<CurveGroupDefinition> loadCurveGroups(ResourceLocator groupsResource) {
  // delegate to the replacement method; behavior is identical
  return loadCurveGroupDefinitions(groupsResource);
}

/**
 * Loads the curve groups definition CSV file.
 * <p>
 * The resulting definitions will contain an empty list of {@link NodalCurveDefinition}.
 *
 * @param groupsResource  the curve groups CSV resource
 * @return the list of definitions
 */
public static List<CurveGroupDefinition> loadCurveGroupDefinitions(ResourceLocator groupsResource) {
  CharSource charSource = groupsResource.getCharSource();
  return parseCurveGroupDefinitions(charSource);
}

//-------------------------------------------------------------------------
/**
 * Parses the curve groups definition CSV file.
 * <p>
 * The list of {@link NodalCurveDefinition} will be empty in the resulting definition.
 *
 * @param groupsCharSource  the curve groups CSV character source
 * @return the list of definitions
 */
public static List<CurveGroupDefinition> parseCurveGroupDefinitions(CharSource groupsCharSource) {
  // map from curve name to the group/reference pairs that use it, insertion-ordered
  Map<CurveName, Set<GroupAndReference>> curveGroups = new LinkedHashMap<>();
  CsvFile csv = CsvFile.of(groupsCharSource, true);
  for (CsvRow row : csv.rows()) {
    String curveGroupStr = row.getField(GROUPS_NAME);
    String curveTypeStr = row.getField(GROUPS_CURVE_TYPE);
    String referenceStr = row.getField(GROUPS_REFERENCE);
    String curveNameStr = row.getField(GROUPS_CURVE_NAME);

    GroupAndReference gar = createKey(CurveGroupName.of(curveGroupStr), curveTypeStr, referenceStr);
    CurveName curveName = CurveName.of(curveNameStr);
    // LinkedHashSet preserves first-seen order of the keys for each curve
    curveGroups.computeIfAbsent(curveName, k -> new LinkedHashSet<>()).add(gar);
  }
  return buildCurveGroups(curveGroups);
}

//-------------------------------------------------------------------------
// parses the identifier
private static GroupAndReference createCurveId(
private static GroupAndReference createKey(
CurveGroupName curveGroup,
String curveTypeStr,
String referenceStr) {
Expand All @@ -119,12 +150,12 @@ private static GroupAndReference createCurveId(
}

/**
* Builds a list of curve group definitions from the map of curves and their IDs.
* Builds a list of curve group definitions from the map of curves and their keys.
* <p>
* The curve IDs specify which curve groups each curve belongs to and how it is used in the group, for example
* The keys specify which curve groups each curve belongs to and how it is used in the group, for example
* as a discount curve for a particular currency or as a forward curve for an index.
*
* @param garMap the map of group-reference pairs
* @param garMap  the map of name to keys
* @return a map of curve group name to curve group definition built from the curves
*/
private static ImmutableList<CurveGroupDefinition> buildCurveGroups(
Expand All @@ -150,7 +181,7 @@ private static ImmutableList<CurveGroupDefinition> buildCurveGroups(
}

/**
* Creates a curve group entry for a curve from a list of the curve's IDs from the same curve group.
* Creates a curve group entry for a curve from a list of keys from the same curve group.
*
* @param curveName the name of the curve
* @param gars the group-reference pairs
Expand Down
Expand Up @@ -5,13 +5,16 @@
*/
package com.opengamma.strata.loader.csv;

import static java.util.stream.Collectors.toList;

import java.time.LocalDate;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import com.google.common.collect.ImmutableMap;
import com.google.common.io.CharSource;
import com.opengamma.strata.basics.index.Index;
import com.opengamma.strata.collect.MapStream;
import com.opengamma.strata.collect.Messages;
Expand Down Expand Up @@ -78,20 +81,35 @@ public static ImmutableMap<ObservableId, LocalDateDoubleTimeSeries> load(Resourc
* @throws IllegalArgumentException if the files contain a duplicate entry
*/
public static ImmutableMap<ObservableId, LocalDateDoubleTimeSeries> load(Collection<ResourceLocator> resources) {
  Collection<CharSource> charSources = resources.stream()
      .map(ResourceLocator::getCharSource)
      .collect(toList());
  return parse(charSources);
}

//-------------------------------------------------------------------------
/**
* Parses one or more CSV format fixing series files.
* <p>
* If the files contain a duplicate entry an exception will be thrown.
*
* @param charSources the fixing series CSV character sources
* @return the loaded fixing series, mapped by {@linkplain ObservableId observable ID}
* @throws IllegalArgumentException if the files contain a duplicate entry
*/
public static ImmutableMap<ObservableId, LocalDateDoubleTimeSeries> parse(Collection<CharSource> charSources) {
// builder ensures keys can only be seen once
ImmutableMap.Builder<ObservableId, LocalDateDoubleTimeSeries> builder = ImmutableMap.builder();
for (ResourceLocator timeSeriesResource : resources) {
builder.putAll(loadSingle(timeSeriesResource));
for (CharSource charSource : charSources) {
builder.putAll(parseSingle(charSource));
}
return builder.build();
}

//-------------------------------------------------------------------------
// loads a single fixing series CSV file
private static ImmutableMap<ObservableId, LocalDateDoubleTimeSeries> loadSingle(ResourceLocator resource) {
private static ImmutableMap<ObservableId, LocalDateDoubleTimeSeries> parseSingle(CharSource resource) {
Map<ObservableId, LocalDateDoubleTimeSeriesBuilder> builders = new HashMap<>();
try {
CsvFile csv = CsvFile.of(resource.getCharSource(), true);
CsvFile csv = CsvFile.of(resource, true);
for (CsvRow row : csv.rows()) {
String referenceStr = row.getField(REFERENCE_FIELD);
String dateStr = row.getField(DATE_FIELD);
Expand Down
Expand Up @@ -5,13 +5,23 @@
*/
package com.opengamma.strata.loader.csv;

import static java.util.stream.Collectors.toList;

import java.time.LocalDate;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Predicate;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.io.CharSource;
import com.opengamma.strata.basics.currency.CurrencyPair;
import com.opengamma.strata.basics.currency.FxRate;
import com.opengamma.strata.collect.Messages;
import com.opengamma.strata.collect.io.CsvFile;
import com.opengamma.strata.collect.io.CsvRow;
import com.opengamma.strata.collect.io.ResourceLocator;
Expand Down Expand Up @@ -54,7 +64,7 @@ public final class FxRatesCsvLoader {
* Only those rates that match the specified date will be loaded.
* <p>
* If the files contain a duplicate entry an exception will be thrown.
*
* @param marketDataDate the date to load
* @param resources the CSV resources
* @return the loaded FX rates, mapped by {@linkplain FxRateId rate ID}
Expand All @@ -70,44 +80,145 @@ public static ImmutableMap<FxRateId, FxRate> load(LocalDate marketDataDate, Reso
* Only those rates that match the specified date will be loaded.
* <p>
* If the files contain a duplicate entry an exception will be thrown.
*
* @param marketDataDate the date to load
* @param resources the CSV resources
* @return the loaded FX rates, mapped by {@linkplain FxRateId rate ID}
* @throws IllegalArgumentException if the files contain a duplicate entry
*/
public static ImmutableMap<FxRateId, FxRate> load(LocalDate marketDataDate, Collection<ResourceLocator> resources) {
  Collection<CharSource> charSources = resources.stream().map(r -> r.getCharSource()).collect(toList());
  // parse only rows matching the single date, then extract that date's map (empty if absent)
  return parse(d -> marketDataDate.equals(d), charSources).getOrDefault(marketDataDate, ImmutableMap.of());
}

//-------------------------------------------------------------------------
/**
 * Loads one or more CSV format FX rate files for a set of dates.
 * <p>
 * Only those rates that match one of the specified dates will be loaded.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param marketDataDates  the set of dates to load
 * @param resources  the CSV resources
 * @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> load(
    Set<LocalDate> marketDataDates,
    ResourceLocator... resources) {

  Collection<ResourceLocator> resourceList = Arrays.asList(resources);
  return load(marketDataDates, resourceList);
}

/**
 * Loads one or more CSV format FX rate files for a set of dates.
 * <p>
 * Only those rates that match one of the specified dates will be loaded.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param marketDataDates  the dates to load
 * @param resources  the CSV resources
 * @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> load(
    Set<LocalDate> marketDataDates,
    Collection<ResourceLocator> resources) {

  Collection<CharSource> charSources = resources.stream()
      .map(ResourceLocator::getCharSource)
      .collect(toList());
  return parse(marketDataDates::contains, charSources);
}

//-------------------------------------------------------------------------
/**
 * Loads one or more CSV format FX rate files.
 * <p>
 * All dates that are found will be returned.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param resources  the CSV resources
 * @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> loadAllDates(ResourceLocator... resources) {
  Collection<ResourceLocator> resourceList = Arrays.asList(resources);
  return loadAllDates(resourceList);
}

/**
* Loads one or more CSV format FX rate files.
* <p>
* All dates that are found will be returned.
* <p>
* If the files contain a duplicate entry an exception will be thrown.
*
* @param resources the CSV resources
* @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
* @throws IllegalArgumentException if the files contain a duplicate entry
*/
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> loadAllDates(
Collection<ResourceLocator> resources) {

for (ResourceLocator timeSeriesResource : resources) {
loadSingle(marketDataDate, timeSeriesResource, builder);
Collection<CharSource> charSources = resources.stream().map(r -> r.getCharSource()).collect(toList());
return parse(d -> true, charSources);
}

//-------------------------------------------------------------------------
/**
 * Parses one or more CSV format FX rate files.
 * <p>
 * A predicate is specified that is used to filter the dates that are returned.
 * This could match a single date, a set of dates or all dates.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param datePredicate  the predicate used to select the dates
 * @param charSources  the CSV character sources
 * @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> parse(
    Predicate<LocalDate> datePredicate,
    Collection<CharSource> charSources) {

  // accumulate one builder per date; each builder ensures keys can only be seen once
  Map<LocalDate, ImmutableMap.Builder<FxRateId, FxRate>> buildersByDate = new HashMap<>();
  for (CharSource source : charSources) {
    parseSingle(datePredicate, source, buildersByDate);
  }
  // freeze each per-date builder into the outer immutable map
  ImmutableMap.Builder<LocalDate, ImmutableMap<FxRateId, FxRate>> result = ImmutableMap.builder();
  for (Entry<LocalDate, ImmutableMap.Builder<FxRateId, FxRate>> dateEntry : buildersByDate.entrySet()) {
    result.put(dateEntry.getKey(), dateEntry.getValue().build());
  }
  return result.build();
}

//-------------------------------------------------------------------------
// loads a single CSV file
private static void loadSingle(
LocalDate marketDataDate,
ResourceLocator resource,
ImmutableMap.Builder<FxRateId, FxRate> builder) {
// loads a single CSV file, filtering by date
private static void parseSingle(
Predicate<LocalDate> datePredicate,
CharSource resource,
Map<LocalDate, ImmutableMap.Builder<FxRateId, FxRate>> mutableMap) {

try {
CsvFile csv = CsvFile.of(resource.getCharSource(), true);
CsvFile csv = CsvFile.of(resource, true);
for (CsvRow row : csv.rows()) {
String dateText = row.getField(DATE_FIELD);
LocalDate date = LocalDate.parse(dateText);
if (date.equals(marketDataDate)) {
if (datePredicate.test(date)) {
String currencyPairStr = row.getField(CURRENCY_PAIR_FIELD);
String valueStr = row.getField(VALUE_FIELD);
CurrencyPair currencyPair = CurrencyPair.parse(currencyPairStr);
double value = Double.valueOf(valueStr);
builder.put(FxRateId.of(currencyPair), FxRate.of(currencyPair, value));

ImmutableMap.Builder<FxRateId, FxRate> builderForDate = mutableMap.computeIfAbsent(date, k -> ImmutableMap.builder());
builderForDate.put(FxRateId.of(currencyPair), FxRate.of(currencyPair, value));
}
}
} catch (RuntimeException ex) {
throw new IllegalArgumentException("Error processing resource as CSV file: " + resource, ex);
throw new IllegalArgumentException(
Messages.format("Error processing resource as CSV file: {}", resource), ex);
}
}

Expand Down

0 comments on commit 136c997

Please sign in to comment.