diff --git a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
index 1ca232832..7f6189322 100644
--- a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
+++ b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
@@ -45,9 +45,12 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
 import java.util.zip.GZIPOutputStream;
 
 import static com.conveyal.analysis.util.JsonUtil.toJson;
@@ -165,6 +168,11 @@ private int getIntQueryParameter (Request req, String parameterName, int default
         }
     }
 
+    /**
+     * Get a regional analysis results raster for a single (percentile, cutoff, destination) combination, in one of
+     * several image file formats. This method was factored out for use from two different API endpoints, one for
+     * fetching a single grid, and another for fetching grids for all combinations of parameters at once.
+     */
     private FileStorageKey getSingleCutoffGrid (
             RegionalAnalysis analysis,
             int cutoffIndex,
@@ -242,54 +250,100 @@ private FileStorageKey getSingleCutoffGrid (
     private Object getAllRegionalResults (Request req, Response res) throws IOException {
         // Get the UUID of the regional analysis for which we want the output data.
         final String regionalAnalysisId = req.params("_id");
+        final UserPermissions userPermissions = UserPermissions.from(req);
         RegionalAnalysis analysis = Persistence.regionalAnalyses.findPermitted(
                 QueryBuilder.start("_id").is(req.params("_id")).get(),
                 DBProjection.exclude("request.scenario.modifications"),
-                UserPermissions.from(req)
+                userPermissions
         ).iterator().next();
         if (analysis == null || analysis.deleted) {
             throw AnalysisServerException.notFound("The specified regional analysis is unknown or has been deleted.");
         }
-        List<FileStorageKey> fileStorageKeys = new ArrayList<>();
-        if (analysis.cutoffsMinutes == null || analysis.travelTimePercentiles == null || analysis.destinationPointSetIds == null) {
-            throw AnalysisServerException.badRequest("Batch result download is not available for legacy regional results.");
-        }
-        for (String destinationPointSetId : analysis.destinationPointSetIds) {
-            for (int cutoffIndex = 0; cutoffIndex < analysis.cutoffsMinutes.length; cutoffIndex++) {
-                for (int percentile : analysis.travelTimePercentiles) {
-                    FileStorageKey fileStorageKey = getSingleCutoffGrid(
-                            analysis,
-                            cutoffIndex,
-                            destinationPointSetId,
-                            percentile,
-                            FileStorageFormat.GEOTIFF
+        FileStorageKey zippedResultsKey = new FileStorageKey(RESULTS, analysis._id + "_ALL.zip");
+        // Keep only alphanumeric characters and underscores from the user-specified analysis name.
+        String friendlyAnalysisName = filenameCleanString(analysis.name);
+        if (!fileStorage.exists(zippedResultsKey)) {
+            List<FileStorageKey> fileStorageKeys = new ArrayList<>();
+            if (analysis.cutoffsMinutes == null || analysis.travelTimePercentiles == null || analysis.destinationPointSetIds == null) {
+                throw AnalysisServerException.badRequest("Batch result download is not available for legacy regional results.");
+            }
+            // Map from cryptic UUIDs to human-readable strings that can replace them.
+            Map<String, String> friendlyReplacements = new HashMap<>();
+            {
+                // Replace the regional analysis ID with its name.
+                friendlyReplacements.put(analysis._id, friendlyAnalysisName);
+                // Replace each destination point set ID with its name.
+                int d = 0;
+                for (String destinationPointSetId : analysis.destinationPointSetIds) {
+                    OpportunityDataset opportunityDataset = Persistence.opportunityDatasets.findByIdIfPermitted(
+                            destinationPointSetId,
+                            userPermissions
                     );
-                    fileStorageKeys.add(fileStorageKey);
+                    checkNotNull(opportunityDataset, "Opportunity dataset could not be found in database.");
+                    // To avoid name collisions, prepend an integer.
+                    String friendlyDestinationName = "D%d_%s".formatted(d++, filenameCleanString(opportunityDataset.name));
+                    friendlyReplacements.put(destinationPointSetId, friendlyDestinationName);
                 }
             }
-        }
-        File tempZipFile = File.createTempFile("regional", ".zip");
-        tempZipFile.delete(); // FIXME: zipfs doesn't work on existing empty files, the file has to not exist.
-        tempZipFile.deleteOnExit();
-        Map<String, String> env = Map.of("create", "true");
-        URI uri = URI.create("jar:file:" + tempZipFile.getAbsolutePath());
-        try (FileSystem zipFilesystem = FileSystems.newFileSystem(uri, env)) {
-            for (FileStorageKey fileStorageKey : fileStorageKeys) {
-                Path storagePath = fileStorage.getFile(fileStorageKey).toPath();
-                Path zipPath = zipFilesystem.getPath(storagePath.getFileName().toString());
-                Files.copy(storagePath, zipPath, StandardCopyOption.REPLACE_EXISTING);
+            // Iterate over all combinations and generate one GeoTIFF grid output for each one.
+            // TODO check whether origins are freeform: if (analysis.request.originPointSetKey == null) ?
+            for (String destinationPointSetId : analysis.destinationPointSetIds) {
+                for (int cutoffIndex = 0; cutoffIndex < analysis.cutoffsMinutes.length; cutoffIndex++) {
+                    for (int percentile : analysis.travelTimePercentiles) {
+                        FileStorageKey fileStorageKey = getSingleCutoffGrid(
+                                analysis,
+                                cutoffIndex,
+                                destinationPointSetId,
+                                percentile,
+                                FileStorageFormat.GEOTIFF
+                        );
+                        fileStorageKeys.add(fileStorageKey);
+                    }
+                }
+            }
+            // Also include any CSV files that were generated. TODO these are gzipped; decompress and re-compress them.
+            // for (String csvStorageName : analysis.resultStorage.values()) {
+            //     LOG.info("Including {} in results zip file.", csvStorageName);
+            //     FileStorageKey csvKey = new FileStorageKey(RESULTS, csvStorageName);
+            //     fileStorageKeys.add(csvKey);
+            // }
+            File tempZipFile = File.createTempFile("regional", ".zip");
+            // Zipfs can't open existing empty files; the file must not exist. FIXME: non-dangerous race condition.
+            // Examining ZipFileSystemProvider reveals a "useTempFile" env parameter, but this is for the individual entries.
+            // May be better to just use ZipOutputStream, which would also allow gzip-to-zip conversion of the CSVs.
+            tempZipFile.delete();
+            Map<String, String> env = Map.of("create", "true");
+            URI uri = URI.create("jar:file:" + tempZipFile.getAbsolutePath());
+            try (FileSystem zipFilesystem = FileSystems.newFileSystem(uri, env)) {
+                for (FileStorageKey fileStorageKey : fileStorageKeys) {
+                    Path storagePath = fileStorage.getFile(fileStorageKey).toPath();
+                    String nameInZip = storagePath.getFileName().toString();
+                    // This is inefficient, but it should work.
+                    for (String replacementKey : friendlyReplacements.keySet()) {
+                        nameInZip = nameInZip.replace(replacementKey, friendlyReplacements.get(replacementKey));
+                    }
+                    Path zipPath = zipFilesystem.getPath(nameInZip);
+                    Files.copy(storagePath, zipPath, StandardCopyOption.REPLACE_EXISTING);
+                }
             }
+            fileStorage.moveIntoStorage(zippedResultsKey, tempZipFile);
         }
-        FileStorageKey fileStorageKey = new FileStorageKey(RESULTS, analysis._id + "_ALL.zip");
-        fileStorage.moveIntoStorage(fileStorageKey, tempZipFile);
-        String humanReadableName = analysis.name + ".zip";
-        res.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"%s\"".formatted(humanReadableName));
+        String humanReadableZipName = friendlyAnalysisName + ".zip";
+        res.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"%s\"".formatted(humanReadableZipName));
         return JsonUtil.toJsonString(JsonUtil.objectNode()
-                .put("url", fileStorage.getURL(fileStorageKey))
-                .put("name", humanReadableName)
+                .put("url", fileStorage.getURL(zippedResultsKey))
+                .put("name", humanReadableZipName)
         );
     }
 
+    private String filenameCleanString (String original) {
+        String ret = original.replaceAll("\\W+", "_");
+        if (ret.length() > 20) {
+            ret = ret.substring(0, 20);
+        }
+        return ret;
+    }
+
     /**
      * This used to extract a particular percentile of a regional analysis as a grid file.
      * Now it just gets the single percentile that exists for any one analysis, either from the local buffer file
diff --git a/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java b/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java
index df289c9fe..8bcf94d26 100644
--- a/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java
+++ b/src/main/java/com/conveyal/analysis/results/BaseResultWriter.java
@@ -61,6 +61,7 @@ protected synchronized void finish (String fileName) throws IOException {
 
         // There's probably a more elegant way to do this with NIO and without closing the buffer.
         // That would be Files.copy(File.toPath(),X) or ByteStreams.copy.
+        // Perhaps better: we could wrap the output buffer in a gzip output stream and zip as we write out.
         InputStream is = new BufferedInputStream(new FileInputStream(bufferFile));
         OutputStream os = new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(gzippedResultFile)));
         ByteStreams.copy(is, os);
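The TODO and FIXME comments above (around tempZipFile, the commented-out CSV block, and BaseResultWriter.finish) all point toward streaming the archive through a ZipOutputStream instead of mounting a zip FileSystem. The code below is only a sketch of that idea, not part of the patch: it assumes a hypothetical helper method on RegionalAnalysisController with access to the existing fileStorage field, and it reuses the FileStorageKey list and friendlyReplacements map built in getAllRegionalResults. It also shows how a gzipped CSV result could be re-stored as a plain deflate-compressed zip entry along the way.

// Illustrative sketch only, not in the patch. Additional imports assumed:
// java.util.zip.ZipEntry, java.util.zip.ZipOutputStream, java.util.zip.GZIPInputStream.
private void writeZipWithZipOutputStream (
        File tempZipFile,
        List<FileStorageKey> fileStorageKeys,
        Map<String, String> friendlyReplacements
) throws IOException {
    try (ZipOutputStream zipOut = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(tempZipFile)))) {
        for (FileStorageKey key : fileStorageKeys) {
            File sourceFile = fileStorage.getFile(key);
            // Apply the same UUID-to-friendly-name substitution used in the zipfs version above.
            String nameInZip = sourceFile.getName();
            for (Map.Entry<String, String> replacement : friendlyReplacements.entrySet()) {
                nameInZip = nameInZip.replace(replacement.getKey(), replacement.getValue());
            }
            boolean gzipped = nameInZip.endsWith(".gz");
            if (gzipped) {
                // Store gzipped CSV results decompressed; the zip entry itself is deflate-compressed.
                nameInZip = nameInZip.substring(0, nameInZip.length() - 3);
            }
            zipOut.putNextEntry(new ZipEntry(nameInZip));
            try (InputStream in = gzipped
                    ? new GZIPInputStream(new FileInputStream(sourceFile))
                    : new BufferedInputStream(new FileInputStream(sourceFile))) {
                in.transferTo(zipOut);
            }
            zipOut.closeEntry();
        }
    }
}

Writing entries this way avoids pre-deleting the temp file and the zipfs mount entirely; whether that trade-off is worth making is the open question the original comments raise.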