diff --git a/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java b/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java index 286fa5593..fae1637b6 100644 --- a/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java +++ b/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java @@ -10,6 +10,7 @@ import com.conveyal.analysis.persistence.AnalysisDB; import com.conveyal.analysis.util.JsonUtil; import com.conveyal.file.FileStorage; +import com.conveyal.file.UrlWithHumanName; import com.conveyal.r5.analyst.progress.Task; import com.fasterxml.jackson.databind.node.ObjectNode; import org.bson.conversions.Bson; @@ -27,6 +28,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.mongodb.client.model.Filters.and; import static com.mongodb.client.model.Filters.eq; +import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON; /** * Stores vector aggregationAreas (used to define the region of a weighted average accessibility metric). @@ -98,10 +100,10 @@ private Collection<AggregationArea> getAggregationAreas (Request req, Response r } /** Returns a JSON-wrapped URL for the mask grid of the aggregation area whose id matches the path parameter. */ - private ObjectNode getAggregationAreaGridUrl (Request req, Response res) { + private UrlWithHumanName getAggregationAreaGridUrl (Request req, Response res) { AggregationArea aggregationArea = aggregationAreaCollection.findPermittedByRequestParamId(req); - String url = fileStorage.getURL(aggregationArea.getStorageKey()); - return JsonUtil.objectNode().put("url", url); + res.type(APPLICATION_JSON.asString()); + return fileStorage.getJsonUrl(aggregationArea.getStorageKey(), aggregationArea.name, "grid"); } @Override diff --git a/src/main/java/com/conveyal/analysis/controllers/LocalFilesController.java b/src/main/java/com/conveyal/analysis/controllers/LocalFilesController.java index c92fbbb33..a5ef44f74 100644 --- a/src/main/java/com/conveyal/analysis/controllers/LocalFilesController.java +++ b/src/main/java/com/conveyal/analysis/controllers/LocalFilesController.java @@ -33,7 +33,9 @@ private Object getFile (Request req, Response res) throws Exception { FileStorageKey key = new FileStorageKey(category, filename); File file = fileStorage.getFile(key); FileStorageFormat format = FileStorageFormat.fromFilename(filename); - res.type(format.mimeType); + if (format != null) { + res.type(format.mimeType); + } // If the content-encoding is set to gzip, Spark automatically gzips the response. This double-gzips anything // that was already gzipped.
Some of our files are already gzipped, and we rely on the client browser to diff --git a/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java b/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java index b1afcfc05..c28363e0a 100644 --- a/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java +++ b/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java @@ -18,6 +18,7 @@ import com.conveyal.file.FileStorageFormat; import com.conveyal.file.FileStorageKey; import com.conveyal.file.FileUtils; +import com.conveyal.file.UrlWithHumanName; import com.conveyal.r5.analyst.FreeFormPointSet; import com.conveyal.r5.analyst.Grid; import com.conveyal.r5.analyst.PointSet; @@ -61,6 +62,7 @@ import static com.conveyal.file.FileCategory.GRIDS; import static com.conveyal.r5.analyst.WebMercatorExtents.parseZoom; import static com.conveyal.r5.analyst.progress.WorkProductType.OPPORTUNITY_DATASET; +import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON; /** * Controller that handles fetching opportunity datasets (grids and other pointset formats). @@ -94,10 +96,6 @@ public OpportunityDatasetController ( /** Store upload status objects FIXME trivial Javadoc */ private final List<OpportunityDatasetUploadStatus> uploadStatuses = new ArrayList<>(); - private ObjectNode getJsonUrl (FileStorageKey key) { - return JsonUtil.objectNode().put("url", fileStorage.getURL(key)); - } - private void addStatusAndRemoveOldStatuses(OpportunityDatasetUploadStatus status) { uploadStatuses.add(status); LocalDateTime now = LocalDateTime.now(); @@ -113,10 +111,11 @@ private Collection<OpportunityDataset> getRegionDatasets(Request req, Response r ); } - private Object getOpportunityDataset(Request req, Response res) { + private UrlWithHumanName getOpportunityDataset(Request req, Response res) { OpportunityDataset dataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req); if (dataset.format == FileStorageFormat.GRID) { - return getJsonUrl(dataset.getStorageKey()); + res.type(APPLICATION_JSON.asString()); + return fileStorage.getJsonUrl(dataset.getStorageKey(), dataset.sourceName + "_" + dataset.name, "grid"); } else { // Currently the UI can only visualize grids, not other kinds of datasets (freeform points). // We do generate a rasterized grid for each of the freeform pointsets we create, so ideally we'd redirect @@ -564,9 +563,10 @@ private List<Grid> createGridsFromShapefile(List<FileItem> fileItems, * Respond to a request with a redirect to a downloadable file.
* @param req should specify regionId, opportunityDatasetId, and an available download format (.tiff or .grid) */ - private Object downloadOpportunityDataset (Request req, Response res) throws IOException { + private UrlWithHumanName downloadOpportunityDataset (Request req, Response res) throws IOException { FileStorageFormat downloadFormat; String format = req.params("format"); + res.type(APPLICATION_JSON.asString()); try { downloadFormat = FileStorageFormat.valueOf(format.toUpperCase()); } catch (IllegalArgumentException iae) { @@ -576,38 +576,32 @@ private Object downloadOpportunityDataset (Request req, Response res) throws IOE String regionId = req.params("_id"); String gridKey = format; FileStorageKey storageKey = new FileStorageKey(GRIDS, String.format("%s/%s.grid", regionId, gridKey)); - return getJsonUrl(storageKey); + return fileStorage.getJsonUrl(storageKey, gridKey, "grid"); + } + if (FileStorageFormat.GRID.equals(downloadFormat)) { + return getOpportunityDataset(req, res); } - - if (FileStorageFormat.GRID.equals(downloadFormat)) return getOpportunityDataset(req, res); - final OpportunityDataset opportunityDataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req); - FileStorageKey gridKey = opportunityDataset.getStorageKey(FileStorageFormat.GRID); FileStorageKey formatKey = opportunityDataset.getStorageKey(downloadFormat); - // if this grid is not on S3 in the requested format, try to get the .grid format if (!fileStorage.exists(gridKey)) { throw AnalysisServerException.notFound("Requested grid does not exist."); } - if (!fileStorage.exists(formatKey)) { // get the grid and convert it to the requested format File gridFile = fileStorage.getFile(gridKey); Grid grid = Grid.read(new GZIPInputStream(new FileInputStream(gridFile))); // closes input stream File localFile = FileUtils.createScratchFile(downloadFormat.toString()); FileOutputStream fos = new FileOutputStream(localFile); - if (FileStorageFormat.PNG.equals(downloadFormat)) { grid.writePng(fos); } else if (FileStorageFormat.GEOTIFF.equals(downloadFormat)) { grid.writeGeotiff(fos); } - fileStorage.moveIntoStorage(formatKey, localFile); } - - return getJsonUrl(formatKey); + return fileStorage.getJsonUrl(formatKey, opportunityDataset.sourceName + "_" + opportunityDataset.name, downloadFormat.extension); } /** diff --git a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java index b08cb8a9b..a60eccd2b 100644 --- a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java +++ b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java @@ -6,6 +6,7 @@ import com.conveyal.analysis.components.broker.Broker; import com.conveyal.analysis.components.broker.JobStatus; import com.conveyal.analysis.models.AnalysisRequest; +import com.conveyal.analysis.models.Model; import com.conveyal.analysis.models.OpportunityDataset; import com.conveyal.analysis.models.RegionalAnalysis; import com.conveyal.analysis.persistence.Persistence; @@ -15,12 +16,12 @@ import com.conveyal.file.FileStorageFormat; import com.conveyal.file.FileStorageKey; import com.conveyal.file.FileUtils; +import com.conveyal.file.UrlWithHumanName; import com.conveyal.r5.analyst.FreeFormPointSet; import com.conveyal.r5.analyst.Grid; import com.conveyal.r5.analyst.PointSet; import com.conveyal.r5.analyst.PointSetCache; import com.conveyal.r5.analyst.cluster.RegionalTask; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.primitives.Ints; import com.mongodb.QueryBuilder; import gnu.trove.list.array.TIntArrayList;
@@ -35,19 +36,30 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.net.URI; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.zip.GZIPOutputStream; import static com.conveyal.analysis.util.JsonUtil.toJson; import static com.conveyal.file.FileCategory.BUNDLES; import static com.conveyal.file.FileCategory.RESULTS; +import static com.conveyal.file.UrlWithHumanName.filenameCleanString; import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON; +import static org.eclipse.jetty.http.MimeTypes.Type.TEXT_HTML; /** * Spark HTTP handler methods that allow launching new regional analyses, as well as deleting them and fetching @@ -157,27 +169,198 @@ private int getIntQueryParameter (Request req, String parameterName, int default } /** - * This used to extract a particular percentile of a regional analysis as a grid file. - * Now it just gets the single percentile that exists for any one analysis, either from the local buffer file - * for an analysis still in progress, or from S3 for a completed analysis. + * Associate a storage key with a human-readable name. + * Currently, this record type is only used within the RegionalAnalysisController class. */ - private Object getRegionalResults (Request req, Response res) throws IOException { + private record HumanKey(FileStorageKey storageKey, String humanName) { }; - // Get some path parameters out of the URL. - // The UUID of the regional analysis for which we want the output data + /** + * Get a regional analysis results raster for a single (percentile, cutoff, destination) combination, in one of + * several image file formats. This method was factored out for use from two different API endpoints, one for + * fetching a single grid, and another for fetching grids for all combinations of parameters at once. + * It returns the unique FileStorageKey for those results, associated with a non-unique human-readable name. + */ + private HumanKey getSingleCutoffGrid ( + RegionalAnalysis analysis, + OpportunityDataset destinations, + int cutoffMinutes, + int percentile, + FileStorageFormat fileFormat + ) throws IOException { + final String regionalAnalysisId = analysis._id; + final String destinationPointSetId = destinations._id; + // Selecting the zeroth cutoff still makes sense for older analyses that don't allow an array of N cutoffs.
+ int cutoffIndex = 0; + if (analysis.cutoffsMinutes != null) { + cutoffIndex = new TIntArrayList(analysis.cutoffsMinutes).indexOf(cutoffMinutes); + checkState(cutoffIndex >= 0); + } + LOG.info( + "Returning {} minute accessibility to pointset {} (percentile {}) for regional analysis {} in format {}.", + cutoffMinutes, destinationPointSetId, percentile, regionalAnalysisId, fileFormat + ); + // Analysis grids now have the percentile and cutoff in their S3 key, because there can be many of each. + // We do this even for results generated by older workers, so they will be re-extracted with the new name. + // These grids are reasonably small, we may be able to just send all cutoffs to the UI instead of selecting. + String singleCutoffKey = String.format( + "%s_%s_P%d_C%d.%s", + regionalAnalysisId, destinationPointSetId, percentile, cutoffMinutes, + fileFormat.extension.toLowerCase(Locale.ROOT) + ); + FileStorageKey singleCutoffFileStorageKey = new FileStorageKey(RESULTS, singleCutoffKey); + if (!fileStorage.exists(singleCutoffFileStorageKey)) { + // An accessibility grid for this particular cutoff has apparently never been extracted from the + // regional results file before. Extract one and save it for future reuse. Older regional analyses + // did not have arrays allowing multiple cutoffs, percentiles, or destination pointsets. The + // filenames of such regional accessibility results will not have a percentile or pointset ID. + // First try the newest form of regional results: multi-percentile, multi-destination-grid. + String multiCutoffKey = String.format("%s_%s_P%d.access", regionalAnalysisId, destinationPointSetId, percentile); + FileStorageKey multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); + if (!fileStorage.exists(multiCutoffFileStorageKey)) { + LOG.warn("Falling back to older file name formats for regional results file: " + multiCutoffKey); + // Fall back to second-oldest form: multi-percentile, single destination grid. + multiCutoffKey = String.format("%s_P%d.access", regionalAnalysisId, percentile); + multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); + if (fileStorage.exists(multiCutoffFileStorageKey)) { + checkArgument(analysis.destinationPointSetIds.length == 1); + } else { + // Fall back on oldest form of results, single-percentile, single-destination-grid. 
+ multiCutoffKey = regionalAnalysisId + ".access"; + multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); + if (fileStorage.exists(multiCutoffFileStorageKey)) { + checkArgument(analysis.travelTimePercentiles.length == 1); + checkArgument(analysis.destinationPointSetIds.length == 1); + } else { + throw AnalysisServerException.notFound("Cannot find original source regional analysis output."); + } + } + } + LOG.debug("Single-cutoff grid {} not found on S3, deriving it from {}.", singleCutoffKey, multiCutoffKey); + + InputStream multiCutoffInputStream = new FileInputStream(fileStorage.getFile(multiCutoffFileStorageKey)); + Grid grid = new SelectingGridReducer(cutoffIndex).compute(multiCutoffInputStream); + + File localFile = FileUtils.createScratchFile(fileFormat.toString()); + FileOutputStream fos = new FileOutputStream(localFile); + + switch (fileFormat) { + case GRID: + grid.write(new GZIPOutputStream(fos)); + break; + case PNG: + grid.writePng(fos); + break; + case GEOTIFF: + grid.writeGeotiff(fos); + break; + } + + fileStorage.moveIntoStorage(singleCutoffFileStorageKey, localFile); + } + String analysisHumanName = humanNameForEntity(analysis); + String destinationHumanName = humanNameForEntity(destinations); + String resultHumanFilename = filenameCleanString( + String.format("%s_%s_P%d_C%d", analysisHumanName, destinationHumanName, percentile, cutoffMinutes) + ) + "." + fileFormat.extension.toLowerCase(Locale.ROOT); + // Note that the returned human filename already contains the appropriate extension. + return new HumanKey(singleCutoffFileStorageKey, resultHumanFilename); + } + + private Object getAllRegionalResults (Request req, Response res) throws IOException { final String regionalAnalysisId = req.params("_id"); - // The response file format: PNG, TIFF, or GRID - final String fileFormatExtension = req.params("format"); + final UserPermissions userPermissions = UserPermissions.from(req); + final RegionalAnalysis analysis = getAnalysis(regionalAnalysisId, userPermissions); + if (analysis.cutoffsMinutes == null || analysis.travelTimePercentiles == null || analysis.destinationPointSetIds == null) { + throw AnalysisServerException.badRequest("Batch result download is not available for legacy regional results."); + } + if (analysis.request.originPointSetKey != null) { + throw AnalysisServerException.badRequest("Batch result download only available for gridded origins."); + } + FileStorageKey zippedResultsKey = new FileStorageKey(RESULTS, analysis._id + "_ALL.zip"); + if (!fileStorage.exists(zippedResultsKey)) { + // Iterate over all dest, cutoff, percentile combinations and generate one geotiff grid output for each one. + List<HumanKey> humanKeys = new ArrayList<>(); + for (String destinationPointSetId : analysis.destinationPointSetIds) { + OpportunityDataset destinations = getDestinations(destinationPointSetId, userPermissions); + for (int cutoffMinutes : analysis.cutoffsMinutes) { + for (int percentile : analysis.travelTimePercentiles) { + HumanKey gridKey = getSingleCutoffGrid( + analysis, destinations, cutoffMinutes, percentile, FileStorageFormat.GEOTIFF + ); + humanKeys.add(gridKey); + } + } + } + File tempZipFile = File.createTempFile("regional", ".zip"); + // Zipfs can't open existing empty files, the file has to not exist. FIXME: Non-dangerous race condition + // Examining ZipFileSystemProvider reveals a "useTempFile" env parameter, but this is for the individual entries. + // May be better to just use zipOutputStream which would also allow gzip - zip CSV conversion.
+ tempZipFile.delete(); + Map<String, String> env = Map.of("create", "true"); + URI uri = URI.create("jar:file:" + tempZipFile.getAbsolutePath()); + try (FileSystem zipFilesystem = FileSystems.newFileSystem(uri, env)) { + for (HumanKey key : humanKeys) { + Path storagePath = fileStorage.getFile(key.storageKey).toPath(); + Path zipPath = zipFilesystem.getPath(key.humanName); + Files.copy(storagePath, zipPath, StandardCopyOption.REPLACE_EXISTING); + } + } + fileStorage.moveIntoStorage(zippedResultsKey, tempZipFile); + } + res.type(APPLICATION_JSON.asString()); + String analysisHumanName = humanNameForEntity(analysis); + return fileStorage.getJsonUrl(zippedResultsKey, analysisHumanName, "zip"); + } + + /** + * Given an Entity, make a human-readable name for the entity composed of its user-supplied name as well as + * the most rapidly changing digits of its ID to disambiguate in case multiple entities have the same name. + * It is also possible to find the exact entity in many web UI fields using this suffix of its ID. + */ + private static String humanNameForEntity (Model entity) { + // Most or all IDs encountered are MongoDB ObjectIDs. The first four and middle five bytes are slow-changing + // and would not disambiguate between data sets. Only the 3-byte counter at the end will be sure to change. + // See https://www.mongodb.com/docs/manual/reference/method/ObjectId/ + final String id = entity._id; + checkArgument(id.length() > 6, "ID had too few characters."); + String shortId = id.substring(id.length() - 6, id.length()); + String humanName = "%s_%s".formatted(filenameCleanString(entity.name), shortId); + return humanName; + } + /** Fetch destination OpportunityDataset from database, followed by a check that it was present. */ + private static OpportunityDataset getDestinations (String destinationPointSetId, UserPermissions userPermissions) { + OpportunityDataset opportunityDataset = + Persistence.opportunityDatasets.findByIdIfPermitted(destinationPointSetId, userPermissions); + checkNotNull(opportunityDataset, "Opportunity dataset could not be found in database."); + return opportunityDataset; + } + + /** Fetch RegionalAnalysis from database by ID, followed by a check that it was present and not deleted. */ + private static RegionalAnalysis getAnalysis (String analysisId, UserPermissions userPermissions) { RegionalAnalysis analysis = Persistence.regionalAnalyses.findPermitted( - QueryBuilder.start("_id").is(req.params("_id")).get(), + QueryBuilder.start("_id").is(analysisId).get(), DBProjection.exclude("request.scenario.modifications"), - UserPermissions.from(req) + userPermissions ).iterator().next(); - if (analysis == null || analysis.deleted) { throw AnalysisServerException.notFound("The specified regional analysis is unknown or has been deleted."); } + return analysis; + } + + /** Extract a particular percentile and cutoff of a regional analysis in one of several different raster formats. */ + private UrlWithHumanName getRegionalResults (Request req, Response res) throws IOException { + // It is possible that regional analysis is complete, but UI is trying to fetch gridded results when there + // aren't any (only CSV, because origins are freeform). How should we determine whether this analysis is + // expected to have no gridded results and cleanly return a 404?
+ final String regionalAnalysisId = req.params("_id"); + FileStorageFormat format = FileStorageFormat.valueOf(req.params("format").toUpperCase()); + if (!FileStorageFormat.GRID.equals(format) && !FileStorageFormat.PNG.equals(format) && !FileStorageFormat.GEOTIFF.equals(format)) { + throw AnalysisServerException.badRequest("Format \"" + format + "\" is invalid. Request format must be \"grid\", \"png\", or \"geotiff\"."); + } + final UserPermissions userPermissions = UserPermissions.from(req); + RegionalAnalysis analysis = getAnalysis(regionalAnalysisId, userPermissions); // Which channel to extract from results with multiple values per origin (for different travel time cutoffs) // and multiple output files per analysis (for different percentiles of travel time and/or different @@ -187,7 +370,6 @@ private Object getRegionalResults (Request req, Response res) throws IOException // are coming from deprecated fields, are not meaningful and will be overwritten below from query parameters. int percentile = analysis.travelTimePercentile; int cutoffMinutes = analysis.cutoffMinutes; - int cutoffIndex = 0; String destinationPointSetId = analysis.grid; // Handle newer regional analyses with multiple cutoffs in an array. @@ -197,8 +379,7 @@ private Object getRegionalResults (Request req, Response res) throws IOException int nCutoffs = analysis.cutoffsMinutes.length; checkState(nCutoffs > 0, "Regional analysis has no cutoffs."); cutoffMinutes = getIntQueryParameter(req, "cutoff", analysis.cutoffsMinutes[nCutoffs / 2]); - cutoffIndex = new TIntArrayList(analysis.cutoffsMinutes).indexOf(cutoffMinutes); - checkState(cutoffIndex >= 0, + checkArgument(new TIntArrayList(analysis.cutoffsMinutes).contains(cutoffMinutes), "Travel time cutoff for this regional analysis must be taken from this list: (%s)", Ints.join(", ", analysis.cutoffsMinutes) ); @@ -228,89 +409,19 @@ private Object getRegionalResults (Request req, Response res) throws IOException "Destination gridId must be one of: %s", String.join(",", analysis.destinationPointSetIds)); } - // We started implementing the ability to retrieve and display partially completed analyses. // We eventually decided these should not be available here at the same endpoint as complete, immutable results. - if (broker.findJob(regionalAnalysisId) != null) { throw AnalysisServerException.notFound("Analysis is incomplete, no results file is available."); } - - // FIXME It is possible that regional analysis is complete, but UI is trying to fetch gridded results when there - // aren't any (only CSV, because origins are freeform). - // How can we determine whether this analysis is expected to have no gridded results and cleanly return a 404? - - // The analysis has already completed, results should be stored and retrieved from S3 via redirects. - LOG.debug("Returning {} minute accessibility to pointset {} (percentile {}) for regional analysis {}.", - cutoffMinutes, destinationPointSetId, percentile, regionalAnalysisId); - FileStorageFormat format = FileStorageFormat.valueOf(fileFormatExtension.toUpperCase()); - if (!FileStorageFormat.GRID.equals(format) && !FileStorageFormat.PNG.equals(format) && !FileStorageFormat.GEOTIFF.equals(format)) { - throw AnalysisServerException.badRequest("Format \"" + format + "\" is invalid. Request format must be \"grid\", \"png\", or \"tiff\"."); - } - - // Analysis grids now have the percentile and cutoff in their S3 key, because there can be many of each. 
- // We do this even for results generated by older workers, so they will be re-extracted with the new name. - // These grids are reasonably small, we may be able to just send all cutoffs to the UI instead of selecting. - String singleCutoffKey = - String.format("%s_%s_P%d_C%d.%s", regionalAnalysisId, destinationPointSetId, percentile, cutoffMinutes, fileFormatExtension); - - // A lot of overhead here - UI contacts backend, backend calls S3, backend responds to UI, UI contacts S3. - FileStorageKey singleCutoffFileStorageKey = new FileStorageKey(RESULTS, singleCutoffKey); - if (!fileStorage.exists(singleCutoffFileStorageKey)) { - // An accessibility grid for this particular cutoff has apparently never been extracted from the - // regional results file before. Extract one and save it for future reuse. Older regional analyses - // did not have arrays allowing multiple cutoffs, percentiles, or destination pointsets. The - // filenames of such regional accessibility results will not have a percentile or pointset ID. - // First try the newest form of regional results: multi-percentile, multi-destination-grid. - String multiCutoffKey = String.format("%s_%s_P%d.access", regionalAnalysisId, destinationPointSetId, percentile); - FileStorageKey multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); - if (!fileStorage.exists(multiCutoffFileStorageKey)) { - LOG.warn("Falling back to older file name formats for regional results file: " + multiCutoffKey); - // Fall back to second-oldest form: multi-percentile, single destination grid. - multiCutoffKey = String.format("%s_P%d.access", regionalAnalysisId, percentile); - multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); - if (fileStorage.exists(multiCutoffFileStorageKey)) { - checkArgument(analysis.destinationPointSetIds.length == 1); - } else { - // Fall back on oldest form of results, single-percentile, single-destination-grid. - multiCutoffKey = regionalAnalysisId + ".access"; - multiCutoffFileStorageKey = new FileStorageKey(RESULTS, multiCutoffKey); - if (fileStorage.exists(multiCutoffFileStorageKey)) { - checkArgument(analysis.travelTimePercentiles.length == 1); - checkArgument(analysis.destinationPointSetIds.length == 1); - } else { - throw AnalysisServerException.notFound("Cannot find original source regional analysis output."); - } - } - } - LOG.debug("Single-cutoff grid {} not found on S3, deriving it from {}.", singleCutoffKey, multiCutoffKey); - - InputStream multiCutoffInputStream = new FileInputStream(fileStorage.getFile(multiCutoffFileStorageKey)); - Grid grid = new SelectingGridReducer(cutoffIndex).compute(multiCutoffInputStream); - - File localFile = FileUtils.createScratchFile(format.toString()); - FileOutputStream fos = new FileOutputStream(localFile); - - switch (format) { - case GRID: - grid.write(new GZIPOutputStream(fos)); - break; - case PNG: - grid.writePng(fos); - break; - case GEOTIFF: - grid.writeGeotiff(fos); - break; - } - - fileStorage.moveIntoStorage(singleCutoffFileStorageKey, localFile); - } - return JsonUtil.toJsonString( - JsonUtil.objectNode().put("url", fileStorage.getURL(singleCutoffFileStorageKey)) - ); + // Significant overhead here: UI contacts backend, backend calls S3, backend responds to UI, UI contacts S3. 
+ OpportunityDataset destinations = getDestinations(destinationPointSetId, userPermissions); + HumanKey gridKey = getSingleCutoffGrid(analysis, destinations, cutoffMinutes, percentile, format); + res.type(APPLICATION_JSON.asString()); + return fileStorage.getJsonUrl(gridKey.storageKey, gridKey.humanName); } - private String getCsvResults (Request req, Response res) { + private Object getCsvResults (Request req, Response res) { final String regionalAnalysisId = req.params("_id"); final CsvResultType resultType = CsvResultType.valueOf(req.params("resultType").toUpperCase()); // If the resultType parameter received on the API is unrecognized, valueOf throws IllegalArgumentException @@ -332,7 +443,10 @@ private String getCsvResults (Request req, Response res) { FileStorageKey fileStorageKey = new FileStorageKey(RESULTS, storageKey); - res.type("text/plain"); + // TODO handle JSON with human name on UI side + // res.type(APPLICATION_JSON.asString()); + // return fileStorage.getJsonUrl(fileStorageKey, analysis.name, resultType + ".csv"); + res.type(TEXT_HTML.asString()); return fileStorage.getURL(fileStorageKey); } @@ -526,7 +640,7 @@ private RegionalAnalysis updateRegionalAnalysis (Request request, Response respo * Return a JSON-wrapped URL for the file in FileStorage containing the JSON representation of the scenario for * the given regional analysis. */ - private JsonNode getScenarioJsonUrl (Request request, Response response) { + private UrlWithHumanName getScenarioJsonUrl (Request request, Response response) { RegionalAnalysis regionalAnalysis = Persistence.regionalAnalyses.findByIdIfPermitted( request.params("_id"), DBProjection.exclude("request.scenario.modifications"), @@ -537,9 +651,9 @@ private JsonNode getScenarioJsonUrl (Request request, Response response) { final String scenarioId = regionalAnalysis.request.scenarioId; checkNotNull(networkId, "RegionalAnalysis did not contain a network ID."); checkNotNull(scenarioId, "RegionalAnalysis did not contain an embedded request with scenario ID."); - String scenarioUrl = fileStorage.getURL( - new FileStorageKey(BUNDLES, getScenarioFilename(regionalAnalysis.bundleId, scenarioId))); - return JsonUtil.objectNode().put("url", scenarioUrl); + FileStorageKey scenarioKey = new FileStorageKey(BUNDLES, getScenarioFilename(regionalAnalysis.bundleId, scenarioId)); + response.type(APPLICATION_JSON.asString()); + return fileStorage.getJsonUrl(scenarioKey, regionalAnalysis.name, "scenario.json"); } @Override @@ -549,11 +663,11 @@ public void registerEndpoints (spark.Service sparkService) { sparkService.get("/:regionId/regional/running", this::getRunningAnalyses, toJson); }); sparkService.path("/api/regional", () -> { - // For grids, no transformer is supplied: render raw bytes or input stream rather than transforming to JSON. 
sparkService.get("/:_id", this::getRegionalAnalysis); - sparkService.get("/:_id/grid/:format", this::getRegionalResults); - sparkService.get("/:_id/csv/:resultType", this::getCsvResults); - sparkService.get("/:_id/scenarioJsonUrl", this::getScenarioJsonUrl); + sparkService.get("/:_id/all", this::getAllRegionalResults, toJson); + sparkService.get("/:_id/grid/:format", this::getRegionalResults, toJson); + sparkService.get("/:_id/csv/:resultType", this::getCsvResults, toJson); + sparkService.get("/:_id/scenarioJsonUrl", this::getScenarioJsonUrl, toJson); sparkService.delete("/:_id", this::deleteRegionalAnalysis, toJson); sparkService.post("", this::createRegionalAnalysis, toJson); sparkService.put("/:_id", this::updateRegionalAnalysis, toJson); diff --git a/src/main/java/com/conveyal/analysis/models/AnalysisRequest.java b/src/main/java/com/conveyal/analysis/models/AnalysisRequest.java index 80d5e507a..cda79c9b8 100644 --- a/src/main/java/com/conveyal/analysis/models/AnalysisRequest.java +++ b/src/main/java/com/conveyal/analysis/models/AnalysisRequest.java @@ -21,6 +21,7 @@ import java.util.Collection; import java.util.EnumSet; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; /** @@ -175,6 +176,14 @@ public class AnalysisRequest { */ public int dualAccessibilityThreshold = 0; + /** + * Freeform (untyped) flags for enabling experimental, undocumented, or arcane behavior in backend or workers. + * This should be used to replace all previous special behavior flags that were embedded inside analysis names etc. + */ + public Set flags; + + /** Control the details of CSV regional analysis output, including whether to output IDs, names, or both. */ + public CsvResultOptions csvResultOptions = new CsvResultOptions(); /** * Create the R5 `Scenario` from this request. @@ -281,6 +290,8 @@ public void populateTask (AnalysisWorkerTask task, UserPermissions userPermissio task.includeTemporalDensity = includeTemporalDensity; task.dualAccessibilityThreshold = dualAccessibilityThreshold; + task.flags = flags; + task.csvResultOptions = csvResultOptions; } private EnumSet getEnumSetFromString (String s) { diff --git a/src/main/java/com/conveyal/analysis/models/CsvResultOptions.java b/src/main/java/com/conveyal/analysis/models/CsvResultOptions.java new file mode 100644 index 000000000..e925e5ff3 --- /dev/null +++ b/src/main/java/com/conveyal/analysis/models/CsvResultOptions.java @@ -0,0 +1,17 @@ +package com.conveyal.analysis.models; + +import com.conveyal.r5.transit.TransitLayer.EntityRepresentation; + +import static com.conveyal.r5.transit.TransitLayer.EntityRepresentation.ID_ONLY; + +/** + * API model type included in analysis requests to control details of CSV regional analysis output. + * This type is shared between AnalysisRequest (Frontend -> Broker) and AnalysisWorkerTask (Broker -> Workers). + * There is precedent for nested compound types shared across those top level request types (see DecayFunction). + */ +public class CsvResultOptions { + public EntityRepresentation routeRepresentation = ID_ONLY; + public EntityRepresentation stopRepresentation = ID_ONLY; + // Only feed ID representation is allowed to be null (no feed IDs at all, the default). 
+ public EntityRepresentation feedRepresentation = null; +} diff --git a/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java b/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java index df289c9fe..8bcf94d26 100644 --- a/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java +++ b/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java @@ -61,6 +61,7 @@ protected synchronized void finish (String fileName) throws IOException { // There's probably a more elegant way to do this with NIO and without closing the buffer. // That would be Files.copy(File.toPath(),X) or ByteStreams.copy. + // Perhaps better: we could wrap the output buffer in a gzip output stream and zip as we write out. InputStream is = new BufferedInputStream(new FileInputStream(bufferFile)); OutputStream os = new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(gzippedResultFile))); ByteStreams.copy(is, os); diff --git a/src/main/java/com/conveyal/file/FileStorage.java b/src/main/java/com/conveyal/file/FileStorage.java index 52de21316..f9bd687cc 100644 --- a/src/main/java/com/conveyal/file/FileStorage.java +++ b/src/main/java/com/conveyal/file/FileStorage.java @@ -94,4 +94,15 @@ default InputStream getInputStream (FileCategory fileCategory, String fileName) } } + default UrlWithHumanName getJsonUrl (FileStorageKey key, String rawHumanName, String humanExtension) { + String url = this.getURL(key); + return UrlWithHumanName.fromCleanedName(url, rawHumanName, humanExtension); + } + + /** This assumes the humanFileName is already a complete filename (cleaned and truncated with any extension). */ + default UrlWithHumanName getJsonUrl (FileStorageKey key, String humanFileName) { + String url = this.getURL(key); + return new UrlWithHumanName(url, humanFileName); + } + } diff --git a/src/main/java/com/conveyal/file/FileStorageFormat.java b/src/main/java/com/conveyal/file/FileStorageFormat.java index c33569de9..e3b6e0fe0 100644 --- a/src/main/java/com/conveyal/file/FileStorageFormat.java +++ b/src/main/java/com/conveyal/file/FileStorageFormat.java @@ -1,5 +1,7 @@ package com.conveyal.file; +import java.util.Locale; + /** * An enumeration of all the file types we handle as uploads, derived internal data, or work products. * Really this should be a union of several enumerated types (upload/internal/product) but Java does not allow this. @@ -37,7 +39,12 @@ public enum FileStorageFormat { } public static FileStorageFormat fromFilename (String filename) { - String extension = filename.substring(filename.lastIndexOf(".") + 1); - return FileStorageFormat.valueOf(extension.toUpperCase()); + String extension = filename.substring(filename.lastIndexOf(".") + 1).toLowerCase(Locale.ROOT); + for (FileStorageFormat format : FileStorageFormat.values()) { + if (format.extension.equals(extension)) { + return format; + } + } + return null; } } diff --git a/src/main/java/com/conveyal/file/UrlWithHumanName.java b/src/main/java/com/conveyal/file/UrlWithHumanName.java new file mode 100644 index 000000000..f7a49c933 --- /dev/null +++ b/src/main/java/com/conveyal/file/UrlWithHumanName.java @@ -0,0 +1,43 @@ +package com.conveyal.file; + +/** + * Combines a url for downloading a file, which might include a globally unique but human-annoying UUID, with a + * suggested human-readable name for that file when saved by an end user. The humanName may not be globally unique, + * so is only appropriate for cases where it doesn't need to be machine discoverable using a UUID. 
The humanName can + * be used as the download attribute of an HTML link, or as the attachment name in a content-disposition header. + * Instances of this class are intended to be serialized to JSON as an HTTP API response. + */ +public class UrlWithHumanName { + public final String url; + public final String humanName; + + public UrlWithHumanName (String url, String humanName) { + this.url = url; + this.humanName = humanName; + } + + private static final int TRUNCATE_FILENAME_CHARS = 220; + + /** + * Given an arbitrary string, make it safe for use in a friendly human-readable filename. This can yield non-unique + * strings and is intended for files downloaded by end users that do not need to be machine-discoverable by unique + * IDs. A length of up to 255 characters will work with most filesystems and within ZIP files. In all names we + * generate, the end of the name more uniquely identifies it (contains a fragment of a hex object ID or contains + * the distinguishing factors such as cutoff and percentile for files within a ZIP archive). Therefore, we truncate + * to a suffix rather than a prefix when the name is too long. We keep the length somewhat under 255 in case some + * other short suffix needs to be appended before use as a filename. + * Note that this will strip dot characters out of the string, so any dot and extension must be suffixed later. + */ + public static String filenameCleanString (String original) { + String ret = original.replaceAll("\\W+", "_"); + if (ret.length() > TRUNCATE_FILENAME_CHARS) { + ret = ret.substring(ret.length() - TRUNCATE_FILENAME_CHARS, ret.length()); + } + return ret; + } + + public static UrlWithHumanName fromCleanedName (String url, String rawHumanName, String humanExtension) { + String humanName = UrlWithHumanName.filenameCleanString(rawHumanName) + "." + humanExtension; + return new UrlWithHumanName(url, humanName); + } +} diff --git a/src/main/java/com/conveyal/r5/analyst/cluster/AnalysisWorkerTask.java b/src/main/java/com/conveyal/r5/analyst/cluster/AnalysisWorkerTask.java index 30266f171..2698905fa 100644 --- a/src/main/java/com/conveyal/r5/analyst/cluster/AnalysisWorkerTask.java +++ b/src/main/java/com/conveyal/r5/analyst/cluster/AnalysisWorkerTask.java @@ -1,5 +1,6 @@ package com.conveyal.r5.analyst.cluster; +import com.conveyal.analysis.models.CsvResultOptions; import com.conveyal.r5.analyst.FreeFormPointSet; import com.conveyal.r5.analyst.Grid; import com.conveyal.r5.analyst.GridTransformWrapper; @@ -15,6 +16,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo; import java.util.Arrays; +import java.util.Set; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; @@ -177,6 +179,15 @@ public abstract class AnalysisWorkerTask extends ProfileRequest { */ public ChaosParameters injectFault; + /** + * Freeform (untyped) flags for enabling experimental, undocumented, or arcane worker behavior. + * This should be used to replace all previous special behavior flags that were embedded inside analysis names etc. + */ + public Set<String> flags; + + /** Control the details of CSV regional analysis output, including whether to output IDs, names, or both. */ + public CsvResultOptions csvResultOptions; + /** * Is this a single point or regional request? Needed to encode types in JSON serialization. Can that type field be * added automatically with a serializer annotation instead of by defining a getter method and two dummy methods?
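To make the UrlWithHumanName contract concrete, here is a minimal illustrative sketch of the expected behavior. The input name, URL, and exact JSON shape are assumptions for illustration, based on the code above and on default Jackson serialization of the two public fields:

    // Hypothetical inputs, illustrating filenameCleanString and fromCleanedName as defined above.
    String cleaned = UrlWithHumanName.filenameCleanString("Access to Jobs (2023), v2");
    // cleaned == "Access_to_Jobs_2023_v2" -- each run of non-word characters, including dots,
    // becomes a single "_", which is why the extension is appended only after cleaning.
    UrlWithHumanName wrapped = UrlWithHumanName.fromCleanedName(
            "https://example-bucket.s3.amazonaws.com/abc123.grid", // hypothetical storage URL
            "Access to Jobs (2023), v2",
            "grid"
    );
    // Serialized as an HTTP API response, this would yield roughly:
    // {"url": "https://example-bucket.s3.amazonaws.com/abc123.grid", "humanName": "Access_to_Jobs_2023_v2.grid"}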
diff --git a/src/main/java/com/conveyal/r5/analyst/cluster/PathResult.java b/src/main/java/com/conveyal/r5/analyst/cluster/PathResult.java index c2366ac1a..43e617920 100644 --- a/src/main/java/com/conveyal/r5/analyst/cluster/PathResult.java +++ b/src/main/java/com/conveyal/r5/analyst/cluster/PathResult.java @@ -1,5 +1,6 @@ package com.conveyal.r5.analyst.cluster; +import com.conveyal.analysis.models.CsvResultOptions; import com.conveyal.r5.analyst.StreetTimesAndModes; import com.conveyal.r5.transit.TransitLayer; import com.conveyal.r5.transit.path.Path; @@ -47,19 +48,24 @@ public class PathResult { * With additional changes, patterns could be collapsed further to route combinations or modes. */ public final Multimap<RouteSequence, Iteration>[] iterationsForPathTemplates; + private final TransitLayer transitLayer; + private final CsvResultOptions csvOptions; + public static final String[] DATA_COLUMNS = new String[]{ "routes", "boardStops", "alightStops", "rideTimes", + "feedIds", "accessTime", "egressTime", "transferTime", "waitTimes", "totalTime", - "nIterations" + "nIterations", + "group" }; public PathResult(AnalysisWorkerTask task, TransitLayer transitLayer) { @@ -76,6 +82,7 @@ } iterationsForPathTemplates = new Multimap[nDestinations]; this.transitLayer = transitLayer; + this.csvOptions = task.csvResultOptions; } /** @@ -108,7 +115,7 @@ public ArrayList<String[]>[] summarizeIterations(Stat stat) { int nIterations = iterations.size(); checkState(nIterations > 0, "A path was stored without any iterations"); String waits = null, transfer = null, totalTime = null; - String[] path = routeSequence.detailsWithGtfsIds(transitLayer); + String[] path = routeSequence.detailsWithGtfsIds(transitLayer, csvOptions); double targetValue; IntStream totalWaits = iterations.stream().mapToInt(i -> i.waitTimes.sum()); if (stat == Stat.MINIMUM) { @@ -135,7 +142,10 @@ score = thisScore; } } - String[] row = ArrayUtils.addAll(path, transfer, waits, totalTime, String.valueOf(nIterations)); + String group = ""; // Reserved for future use + String[] row = ArrayUtils.addAll( + path, transfer, waits, totalTime, String.valueOf(nIterations), group + ); checkState(row.length == DATA_COLUMNS.length); summary[d].add(row); } diff --git a/src/main/java/com/conveyal/r5/transit/TransitLayer.java b/src/main/java/com/conveyal/r5/transit/TransitLayer.java index 871491ff2..f978d5a5e 100644 --- a/src/main/java/com/conveyal/r5/transit/TransitLayer.java +++ b/src/main/java/com/conveyal/r5/transit/TransitLayer.java @@ -54,6 +54,9 @@ import java.util.stream.IntStream; import java.util.stream.StreamSupport; +import static com.conveyal.r5.transit.TransitLayer.EntityRepresentation.ID_ONLY; +import static com.conveyal.r5.transit.TransitLayer.EntityRepresentation.NAME_ONLY; /** * A key simplifying factor is that we don't handle overnight trips. This is fine for analysis at usual times of day. @@ -815,31 +818,66 @@ public TIntSet findStopsInGeometry (Geometry geometry) { return stops; } + public enum EntityRepresentation { + ID_ONLY, NAME_ONLY, NAME_AND_ID + } + /** * For the given pattern index, returns the GTFS routeId. If includeName is true, the returned string will * also include a route_short_name or route_long_name (if they are not null).
*/ - public String routeString(int routeIndex, boolean includeName) { + public String routeString (int routeIndex, EntityRepresentation nameOrId) { RouteInfo routeInfo = routes.get(routeIndex); - String route = routeInfo.route_id; - if (includeName) { - if (routeInfo.route_short_name != null) { - route += " (" + routeInfo.route_short_name + ")"; - } else if (routeInfo.route_long_name != null){ - route += " (" + routeInfo.route_long_name + ")"; + String name = routeInfo.route_short_name; + String id = routeInfo.route_id; + // If we might actually use the name, check some fallbacks. + if (nameOrId != ID_ONLY) { + if (name == null) { + name = routeInfo.route_long_name; + } + if (name == null) { + name = routeInfo.route_id; } } - return route; + return switch (nameOrId) { + case NAME_ONLY -> name; + case NAME_AND_ID -> name + " (" + id + ")"; + default -> id; + }; } /** * For the given stop index, returns the GTFS stopId (stripped of R5's feedId prefix) and, if includeName is true, * stopName. */ - public String stopString(int stopIndex, boolean includeName) { - // TODO use a compact feed index, instead of splitting to remove feedIds - String stop = stopIdForIndex.get(stopIndex) == null ? "[new]" : stopIdForIndex.get(stopIndex).split(":")[1]; - if (includeName) stop += " (" + stopNames.get(stopIndex) + ")"; - return stop; + public String stopString(int stopIndex, EntityRepresentation nameOrId) { + String stopId = stopIdForIndex.get(stopIndex); + String stopName = stopNames.get(stopIndex); + // I'd trust the JVM JIT to optimize out these assignments on different code paths, but not the split call. + if (nameOrId != NAME_ONLY) { + if (stopId == null) { + stopId = "[new]"; + } else { + // TODO use a compact feed ID instead of splitting to remove feedIds (or put feedId into another CSV field) + stopId = stopId.split(":")[1]; + } + } + if (nameOrId != ID_ONLY) { + if (stopName == null) { + stopName = "[new]"; + } + } + return switch (nameOrId) { + case NAME_ONLY -> stopName; + case NAME_AND_ID -> stopName + " (" + stopId + ")"; + default -> stopId; + }; + } + + /** + * For a supplied stopIndex in the transit layer, return the feed id (which we prepend to the GTFS stop id). + */ + public String feedFromStop(int stopIndex) { + return stopIdForIndex.get(stopIndex) == null ? "[new]" : stopIdForIndex.get(stopIndex).split(":")[0]; } } diff --git a/src/main/java/com/conveyal/r5/transit/path/RouteSequence.java b/src/main/java/com/conveyal/r5/transit/path/RouteSequence.java index 6ed2eb73c..62e6cfd34 100644 --- a/src/main/java/com/conveyal/r5/transit/path/RouteSequence.java +++ b/src/main/java/com/conveyal/r5/transit/path/RouteSequence.java @@ -1,6 +1,8 @@ package com.conveyal.r5.transit.path; +import com.conveyal.analysis.models.CsvResultOptions; import com.conveyal.r5.transit.TransitLayer; +import com.conveyal.r5.transit.TransitLayer.EntityRepresentation; import gnu.trove.list.TIntList; import gnu.trove.list.array.TIntArrayList; @@ -9,6 +11,8 @@ import java.util.Objects; import java.util.StringJoiner; +import static com.conveyal.r5.transit.TransitLayer.EntityRepresentation.NAME_AND_ID; + /** A door-to-door path that includes the routes ridden between stops */ public class RouteSequence { @@ -27,25 +31,39 @@ public RouteSequence(PatternSequence patternSequence, TransitLayer transitLayer) } } - /** Returns details summarizing this route sequence, using GTFS ids stored in the supplied transitLayer. 
*/ - public String[] detailsWithGtfsIds(TransitLayer transitLayer){ - StringJoiner routeIds = new StringJoiner("|"); - StringJoiner boardStopIds = new StringJoiner("|"); - StringJoiner alightStopIds = new StringJoiner("|"); - StringJoiner rideTimes = new StringJoiner("|"); + /** + * Returns details summarizing this route sequence, using GTFS ids stored in the supplied transitLayer. + * @param csvOptions indicates whether names or IDs should be returned for certain fields. + * @return array of pipe-concatenated strings, with the route, board stop, alight stop, ride time, and feed for + * each transit leg, as well as the access and egress time. + * + * If csvOptions.feedRepresentation is not null, the feed values will be the R5-generated UUIDs for the boarding + * stop of each leg. We are grabbing the feed ID from the stop rather than the route (which might seem like a better + * representative of the leg) because stops happen to have a readily available feed ID. + */ + public String[] detailsWithGtfsIds (TransitLayer transitLayer, CsvResultOptions csvOptions){ + StringJoiner routeJoiner = new StringJoiner("|"); + StringJoiner boardStopJoiner = new StringJoiner("|"); + StringJoiner alightStopJoiner = new StringJoiner("|"); + StringJoiner feedJoiner = new StringJoiner("|"); + StringJoiner rideTimeJoiner = new StringJoiner("|"); for (int i = 0; i < routes.size(); i++) { - routeIds.add(transitLayer.routeString(routes.get(i), false)); - boardStopIds.add(transitLayer.stopString(stopSequence.boardStops.get(i), false)); - alightStopIds.add(transitLayer.stopString(stopSequence.alightStops.get(i), false)); - rideTimes.add(String.format("%.1f", stopSequence.rideTimesSeconds.get(i) / 60f)); + routeJoiner.add(transitLayer.routeString(routes.get(i), csvOptions.routeRepresentation)); + boardStopJoiner.add(transitLayer.stopString(stopSequence.boardStops.get(i), csvOptions.stopRepresentation)); + alightStopJoiner.add(transitLayer.stopString(stopSequence.alightStops.get(i), csvOptions.stopRepresentation)); + if (csvOptions.feedRepresentation != null) { + feedJoiner.add(transitLayer.feedFromStop(stopSequence.boardStops.get(i))); + } + rideTimeJoiner.add(String.format("%.1f", stopSequence.rideTimesSeconds.get(i) / 60f)); } String accessTime = stopSequence.access == null ? null : String.format("%.1f", stopSequence.access.time / 60f); String egressTime = stopSequence.egress == null ? null : String.format("%.1f", stopSequence.egress.time / 60f);
return new String[]{ - routeIds.toString(), - boardStopIds.toString(), - alightStopIds.toString(), - rideTimes.toString(), + routeJoiner.toString(), + boardStopJoiner.toString(), + alightStopJoiner.toString(), + rideTimeJoiner.toString(), + feedJoiner.toString(), accessTime, egressTime }; @@ -55,9 +73,9 @@ public Collection<TransitLeg> transitLegs(TransitLayer transitLayer) { Collection<TransitLeg> transitLegs = new ArrayList<>(); for (int i = 0; i < routes.size(); i++) { - String routeString = transitLayer.routeString(routes.get(i), true); - String boardStop = transitLayer.stopString(stopSequence.boardStops.get(i), true); - String alightStop = transitLayer.stopString(stopSequence.alightStops.get(i), true); + String routeString = transitLayer.routeString(routes.get(i), NAME_AND_ID); + String boardStop = transitLayer.stopString(stopSequence.boardStops.get(i), NAME_AND_ID); + String alightStop = transitLayer.stopString(stopSequence.alightStops.get(i), NAME_AND_ID); transitLegs.add(new TransitLeg(routeString, stopSequence.rideTimesSeconds.get(i), boardStop, alightStop)); } return transitLegs;
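The reworked routeString and stopString methods share one selection pattern: resolve a display name with fallbacks, then return the name, the ID, or both, according to the requested EntityRepresentation. A self-contained sketch of that pattern, with hypothetical names and IDs (illustrative only, not part of the commit):

    // Condensed form of the name-or-ID selection used by routeString and stopString above.
    class NameOrIdSketch {
        enum EntityRepresentation { ID_ONLY, NAME_ONLY, NAME_AND_ID }

        static String represent (String name, String id, EntityRepresentation rep) {
            if (rep != EntityRepresentation.ID_ONLY && name == null) {
                name = id; // fall back to the ID when no name is available
            }
            return switch (rep) {
                case NAME_ONLY -> name;
                case NAME_AND_ID -> name + " (" + id + ")";
                default -> id;
            };
        }
    }
    // NameOrIdSketch.represent("Blue Line", "route_42", EntityRepresentation.NAME_AND_ID) returns "Blue Line (route_42)"
    // NameOrIdSketch.represent(null, "route_42", EntityRepresentation.NAME_ONLY) returns "route_42"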