diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/controler/DatasetApiController.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/controler/DatasetApiController.java
index c6a40bc04f..01b260ffc9 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/controler/DatasetApiController.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/controler/DatasetApiController.java
@@ -60,6 +60,7 @@ import org.slf4j.LoggerFactory;
 import org.springframework.amqp.rabbit.core.RabbitTemplate;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.core.io.ByteArrayResource;
 import org.springframework.data.domain.Page;
@@ -139,8 +140,9 @@ public class DatasetApiController implements DatasetApi {
     @Autowired
     private SolrService solrService;
 
-    @Autowired
-    DatasetDownloaderServiceImpl datasetDownloaderService;
+    @Qualifier("datasetDownloaderServiceImpl")
+    @Autowired
+    protected DatasetDownloaderServiceImpl datasetDownloaderService;
 
     @Autowired
     private ObjectMapper objectMapper;
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/repository/DatasetRepository.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/repository/DatasetRepository.java
index 7ee363b4bb..dcb04bac45 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/repository/DatasetRepository.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/repository/DatasetRepository.java
@@ -79,4 +79,9 @@ public interface DatasetRepository extends PagingAndSortingRepository<Dataset, Long> {
     List<Dataset> deleteByDatasetProcessingId(Long id);
 
     boolean existsByTagsContains(StudyTag tag);
+
+    @Query(value="SELECT ds.id FROM dataset as ds " +
+            "INNER JOIN input_of_dataset_processing as input ON ds.id=input.dataset_id " +
+            "WHERE input.processing_id = :processingId or ds.dataset_processing_id = :processingId", nativeQuery = true)
+    List<Long> findDatasetsByProcessingId(Long processingId);
 }
\ No newline at end of file
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/security/DatasetSecurityService.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/security/DatasetSecurityService.java
index d5672036fe..c8d331b306 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/security/DatasetSecurityService.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/security/DatasetSecurityService.java
@@ -466,28 +466,32 @@ public boolean hasRightOnEveryDataset(List<Long> datasetIds, String rightStr) throws EntityNotFoundException {
         }
 
         Iterable<Dataset> datasets = datasetRepository.findAllById(datasetIds);
-
-        boolean hasRight = true;
-        for (Dataset dataset : datasets) {
-            if (dataset.getDatasetAcquisition() == null
-                    || dataset.getDatasetAcquisition().getExamination() == null
-                    || dataset.getDatasetAcquisition().getExamination().getStudyId() == null) {
-
-                if (dataset.getDatasetProcessing() != null && dataset.getDatasetProcessing().getInputDatasets() != null) {
-                    for (Dataset inputDs : dataset.getDatasetProcessing().getInputDatasets()) {
-                        hasRight &= hasRightOnTrustedDataset(inputDs, rightStr);
-                    }
-                } else {
-                    throw new IllegalStateException("Cannot check dataset n°" + dataset.getId() + " rights, this dataset has neither examination nor processing parent !");
-                }
-            } else {
-                hasRight &= this.hasRightOnStudyCenter(dataset.getDatasetAcquisition().getExamination().getCenterId(),
-                        dataset.getDatasetAcquisition().getExamination().getStudyId(), rightStr);
-            }
-        }
-        return hasRight;
-    }
-
-    /**
+        return hasRightOnDatasets(datasets, rightStr);
+    }
+
+    private boolean hasRightOnDatasets(Iterable<Dataset> datasets, String rightStr) {
+        boolean hasRight = true;
+        for (Dataset dataset : datasets) {
+            if (dataset.getDatasetAcquisition() == null
+                    || dataset.getDatasetAcquisition().getExamination() == null
+                    || dataset.getDatasetAcquisition().getExamination().getStudyId() == null) {
+
+                if (dataset.getDatasetProcessing() != null && dataset.getDatasetProcessing().getInputDatasets() != null) {
+                    for (Dataset inputDs : dataset.getDatasetProcessing().getInputDatasets()) {
+                        hasRight &= hasRightOnTrustedDataset(inputDs, rightStr);
+                    }
+                } else {
+                    throw new IllegalStateException("Cannot check dataset n°" + dataset.getId() + " rights, this dataset has neither examination nor processing parent !");
+                }
+            } else {
+                hasRight &= this.hasRightOnStudyCenter(dataset.getDatasetAcquisition().getExamination().getCenterId(), dataset.getDatasetAcquisition().getExamination().getStudyId(), rightStr);
+            }
+        }
+        return hasRight;
+    }
+
+    /**
      * Check that the connected user has the given right for the given dataset.
      *
      * @param dataset the dataset
@@ -1103,5 +1107,45 @@ public boolean hasRightOnExamination(String examinationUID, String rightStr) throws EntityNotFoundException {
         Long id = studyInstanceUIDHandler.extractExaminationId(examinationUID);
         return hasRightOnExamination(id, rightStr);
     }
-
+
+    public boolean hasRightOnEveryDatasetOfProcessings(List<Long> processingIds, String rightStr) {
+        boolean hasRight = true;
+
+        for (Long processingId : processingIds) {
+            if (KeycloakUtil.getTokenRoles().contains(ROLE_ADMIN) || processingId == null) {
+                continue;
+            }
+            Iterable<Dataset> datasets = datasetRepository.findAllById(datasetRepository.findDatasetsByProcessingId(processingId));
+
+            hasRight &= hasRightOnDatasets(datasets, rightStr);
+        }
+        return hasRight;
+    }
+
+    /**
+     * Check that the connected user has the given right for the given examinations.
+     *
+     * @param examinationIds the examination ids
+     * @param rightStr the right
+     * @return true or false
+     * @throws EntityNotFoundException
+     */
+    public boolean hasRightOnExaminations(List<Long> examinationIds, String rightStr) throws EntityNotFoundException {
+        if (KeycloakUtil.getTokenRoles().contains(ROLE_ADMIN)) {
+            return true;
+        }
+        for (Long examinationId : examinationIds) {
+            Examination exam = examinationRepository.findById(examinationId).orElse(null);
+            if (exam == null) {
+                throw new EntityNotFoundException("Cannot find examination with id " + examinationId);
+            }
+            if (exam.getStudyId() == null) {
+                return false;
+            }
+            if (!this.hasRightOnStudyCenter(exam.getCenterId(), exam.getStudyId(), rightStr)) {
+                return false;
+            }
+        }
+        return true;
+    }
 }
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/service/DatasetDownloaderServiceImpl.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/service/DatasetDownloaderServiceImpl.java
index 89ec7b4663..11aa56c5a5 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/service/DatasetDownloaderServiceImpl.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/dataset/service/DatasetDownloaderServiceImpl.java
@@ -60,51 +60,49 @@
 @Service
 public class DatasetDownloaderServiceImpl {
 
-    private static final String FAILURES_TXT = "failures.txt";
+    protected static final String FAILURES_TXT = "failures.txt";
 
-    private static final String NII = "nii";
+    protected static final String NII = "nii";
 
-    private static final String DCM = "dcm";
+    protected static final String DCM = "dcm";
 
-    private static final String ZIP = ".zip";
+    protected static final String ZIP = ".zip";
 
-    private static final Logger LOG = LoggerFactory.getLogger(DatasetDownloaderServiceImpl.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(DatasetDownloaderServiceImpl.class);
 
-    private static final String JSON_RESULT_FILENAME = "ERRORS.json";
+    protected static final String JSON_RESULT_FILENAME = "ERRORS.json";
 
-    private static final Long DEFAULT_NIFTI_CONVERTER_ID = 6L;
+    protected static final Long DEFAULT_NIFTI_CONVERTER_ID = 6L;
 
-    public static final String GZIP_EXTENSION = ".gz";
+    protected static final String GZIP_EXTENSION = ".gz";
 
-    public static final String NII_GZ = ".nii.gz";
-
-    public static final String CONVERSION_FAILED_ERROR_MSG = "Nifti conversion failed, you may try to select another one.";
+    protected static final String NII_GZ = ".nii.gz";
+
+    protected static final String CONVERSION_FAILED_ERROR_MSG = "Nifti conversion failed, you may try to select another one.";
 
     @Autowired
     DatasetService datasetService;
 
     @Autowired
-    private WADODownloaderService downloader;
+    protected WADODownloaderService downloader;
 
     @Autowired
-    private SubjectRepository subjectRepository;
+    protected SubjectRepository subjectRepository;
 
     @Autowired
-    private StudyRepository studyRepository;
+    protected StudyRepository studyRepository;
 
     @Autowired
-    private RabbitTemplate rabbitTemplate;
+    protected RabbitTemplate rabbitTemplate;
 
     @Autowired
-    ShanoirEventService eventService;
-
-    SimpleDateFormat fileDateformatter = new SimpleDateFormat("yyyyMMddHHmmss");
+    protected ShanoirEventService eventService;
 
     @Autowired
-    private ObjectMapper objectMapper;
+    protected ObjectMapper objectMapper;
 
     @PostConstruct
-    private void initialize() {
+    protected void initialize() {
         // Set timeout to 5mn (consider nifti reconversion can take some time)
         this.rabbitTemplate.setReplyTimeout(300000);
     }
@@ -115,74 +113,25 @@ public void massiveDownload(String format, List<Dataset> datasets, HttpServletResponse response,
         response.setContentType("application/zip");
         response.setHeader("Content-Disposition", "attachment;filename=" + getFileName(datasets));
-        SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmssSSS");
         Map<Long, DatasetDownloadError> downloadResults = new HashMap<Long, DatasetDownloadError>();
         try (ZipOutputStream zipOutputStream = new ZipOutputStream(response.getOutputStream())) {
             for (Dataset dataset : datasets) {
-                if (!dataset.isDownloadable()) {
-                    downloadResults.put(dataset.getId(), new DatasetDownloadError("Dataset not downloadable", DatasetDownloadError.ERROR));
-                    continue;
-                }
-                DatasetDownloadError downloadResult = new DatasetDownloadError();
-                downloadResults.put(dataset.getId(), downloadResult);
-                // Create a new folder organized by subject / examination
                 String subjectName = getSubjectName(dataset);
                 if (subjectName.contains(File.separator)) {
                     subjectName = subjectName.replaceAll(File.separator, "_");
                 }
+                String studyName = studyRepository.findById(dataset.getStudyId()).map(study -> study.getName()).orElse("unknownStudy");
+
                 String datasetFilePath = null;
                 if (datasets.size() != 1) {
                     datasetFilePath = getDatasetFilepath(dataset, studyName, subjectName);
                 }
+                manageDatasetDownload(dataset, downloadResults, zipOutputStream, subjectName, datasetFilePath, format, withManifest, filesByAcquisitionId, converterId);
 
-                List<URL> pathURLs = new ArrayList<>();
-
-                if (dataset.getDatasetProcessing() != null) {
-                    // DOWNLOAD PROCESSED DATASET
-                    DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.NIFTI_SINGLE_FILE, downloadResult);
-                    DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, true, datasetFilePath);
-                } else if (dataset instanceof EegDataset) {
-                    // DOWNLOAD EEG
-                    DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.EEG, downloadResult);
-                    DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, false, datasetFilePath);
-                } else if (dataset instanceof BidsDataset) {
-                    // DOWNLOAD BIDS
-                    DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.BIDS, downloadResult);
-                    DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, true, datasetFilePath);
-                    // Manage errors here
-                } else if (DCM.equals(format)) {
-                    // DOWNLOAD DICOM
-                    DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.DICOM, downloadResult);
-                    List<String> files = downloader.downloadDicomFilesForURLsAsZip(pathURLs, zipOutputStream, subjectName, dataset, datasetFilePath, downloadResult);
-                    if (withManifest) {
-                        filesByAcquisitionId.putIfAbsent(dataset.getDatasetAcquisition().getId(), new ArrayList<>());
-                        filesByAcquisitionId.get(dataset.getDatasetAcquisition().getId()).addAll(files);
-                    }
-                } else if (NII.equals(format)) {
-                    // Check if we have a specific converter -> nifti reconversion
-                    if (converterId != null) {
-                        reconvertToNifti(format, response, converterId, dataset, formatter, pathURLs, downloadResult, subjectName, zipOutputStream);
-                    } else {
-                        // Check that we have existing nifti, otherwise reconvert using dcm2niix by default.
-                        DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.NIFTI_SINGLE_FILE, downloadResult);
-                        if (!pathURLs.isEmpty()) {
-                            List<String> files = DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, false, datasetFilePath);
-                        } else {
-                            // Reconvert using dcm2niix by default.
-                            reconvertToNifti(format, response, DEFAULT_NIFTI_CONVERTER_ID, dataset, formatter, pathURLs, downloadResult, subjectName, zipOutputStream);
-                        }
-                    }
-                } else {
-                    downloadResult.update("Dataset format was not adapted to dataset download choosen", DatasetDownloadError.ERROR);
-                }
-
-                if (downloadResult.getStatus() == null) {
-                    downloadResults.remove(dataset.getId());
-                }
             }
             if(!filesByAcquisitionId.isEmpty()){
                 DatasetFileUtils.writeManifestForExport(zipOutputStream, filesByAcquisitionId);
@@ -211,7 +160,61 @@ public void massiveDownload(String format, List<Dataset> datasets, HttpServletResponse
         }
     }
 
-    private void reconvertToNifti(String format, HttpServletResponse response, Long converterId, Dataset dataset, SimpleDateFormat formatter, List<URL> pathURLs, DatasetDownloadError downloadResult, String subjectName, ZipOutputStream zipOutputStream) throws RestServiceException, IOException {
+    protected void manageDatasetDownload(Dataset dataset, Map<Long, DatasetDownloadError> downloadResults, ZipOutputStream zipOutputStream, String subjectName, String datasetFilePath, String format, boolean withManifest, Map<Long, List<String>> filesByAcquisitionId, Long converterId) throws IOException, RestServiceException {
+        if (!dataset.isDownloadable()) {
+            downloadResults.put(dataset.getId(), new DatasetDownloadError("Dataset not downloadable", DatasetDownloadError.ERROR));
+            return;
+        }
+        DatasetDownloadError downloadResult = new DatasetDownloadError();
+        downloadResults.put(dataset.getId(), downloadResult);
+
+        List<URL> pathURLs = new ArrayList<>();
+
+        if (dataset.getDatasetProcessing() != null) {
+            // DOWNLOAD PROCESSED DATASET
+            DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.NIFTI_SINGLE_FILE, downloadResult);
+            DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, true, datasetFilePath);
+        } else if (dataset instanceof EegDataset) {
+            // DOWNLOAD EEG
+            DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.EEG, downloadResult);
+            DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, false, datasetFilePath);
+        } else if (dataset instanceof BidsDataset) {
+            // DOWNLOAD BIDS
+            DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.BIDS, downloadResult);
+            DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, true, datasetFilePath);
+            // Manage errors here
+        } else if (DCM.equals(format)) {
+            // DOWNLOAD DICOM
+            DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.DICOM, downloadResult);
+            List<String> files = downloader.downloadDicomFilesForURLsAsZip(pathURLs, zipOutputStream, subjectName, dataset, datasetFilePath, downloadResult);
+            if (withManifest) {
+                filesByAcquisitionId.putIfAbsent(dataset.getDatasetAcquisition().getId(), new ArrayList<>());
+                filesByAcquisitionId.get(dataset.getDatasetAcquisition().getId()).addAll(files);
+            }
+        } else if (NII.equals(format)) {
+            // Check if we have a specific converter -> nifti reconversion
+            if (converterId != null) {
+                reconvertToNifti(format, converterId, dataset, pathURLs, downloadResult, subjectName, zipOutputStream);
+            } else {
+                // Check that we have existing nifti, otherwise reconvert using dcm2niix by default.
+                DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.NIFTI_SINGLE_FILE, downloadResult);
+                if (!pathURLs.isEmpty()) {
+                    DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, false, datasetFilePath);
+                } else {
+                    // Reconvert using dcm2niix by default.
+                    reconvertToNifti(format, DEFAULT_NIFTI_CONVERTER_ID, dataset, pathURLs, downloadResult, subjectName, zipOutputStream);
+                }
+            }
+        } else {
+            downloadResult.update("Dataset format was not adapted to the chosen download format", DatasetDownloadError.ERROR);
+        }
+
+        if (downloadResult.getStatus() == null) {
+            downloadResults.remove(dataset.getId());
+        }
+    }
+
+    protected void reconvertToNifti(String format, Long converterId, Dataset dataset, List<URL> pathURLs, DatasetDownloadError downloadResult, String subjectName, ZipOutputStream zipOutputStream) throws RestServiceException, IOException {
         File userDir = DatasetFileUtils.getUserImportDir("/tmp");
         String tmpFilePath = userDir + File.separator + dataset.getId() + "_" + format;
@@ -272,7 +275,7 @@ private void reconvertToNifti(String format, HttpServletResponse response, Long
         }
     }
 
-    private String getSubjectName(Dataset dataset) {
+    protected String getSubjectName(Dataset dataset) {
         String subjectName = "unknownSubject";
         if(dataset.getSubjectId() != null){
             Optional<Subject> subjectOpt = subjectRepository.findById(dataset.getSubjectId());
@@ -283,7 +286,8 @@ private String getSubjectName(Dataset dataset) {
         return subjectName;
     }
 
-    private String getFileName(List<Dataset> datasets) {
+    protected String getFileName(List<Dataset> datasets) {
+        SimpleDateFormat fileDateformatter = new SimpleDateFormat("yyyyMMddHHmmss");
         if (datasets != null && datasets.size() == 1) {
             String datasetName = getDatasetFileName(datasets.get(0));
             return "Dataset_" + datasetName + "_" + fileDateformatter.format(new DateTime().toDate()) + ZIP;
@@ -292,7 +296,7 @@ private String getFileName(List<Dataset> datasets) {
         }
     }
 
-    private String getDatasetFileName(Dataset dataset) {
+    protected String getDatasetFileName(Dataset dataset) {
         // Only one dataset -> the logic for one dataset is used
         String subjectName = getSubjectName(dataset);
@@ -305,7 +309,7 @@ private String getDatasetFileName(Dataset dataset) {
         return datasetName;
     }
 
-    private String getDatasetFilepath(Dataset dataset, String studyName, String subjectName) {
+    protected String getDatasetFilepath(Dataset dataset, String studyName, String subjectName) {
         Examination exam = datasetService.getExamination(dataset);
 
         String datasetFilePath = studyName + "_" + subjectName + "_Exam-" + exam.getId();
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApi.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApi.java
index 89076bd69c..2ae6883705 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApi.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApi.java
@@ -2,12 +2,12 @@
 * Shanoir NG - Import, manage and share neuroimaging data
 * Copyright (C) 2009-2019 Inria - https://www.inria.fr/
 * Contact us on https://project.inria.fr/shanoir/
- * 
+ *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
- * 
+ *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.html
 */
@@ -19,11 +19,13 @@
 import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.tags.Tag;
+import jakarta.servlet.http.HttpServletResponse;
 import jakarta.validation.Valid;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.shanoir.ng.dataset.dto.DatasetDTO;
 import org.shanoir.ng.processing.dto.DatasetProcessingDTO;
 import org.shanoir.ng.processing.model.DatasetProcessing;
+import org.shanoir.ng.shared.exception.EntityNotFoundException;
 import org.shanoir.ng.shared.exception.RestServiceException;
 import org.shanoir.ng.shared.exception.ShanoirException;
 import org.springframework.http.ResponseEntity;
@@ -38,86 +40,118 @@
 @RequestMapping("/datasetProcessing")
 public interface DatasetProcessingApi {
 
-    @Operation(summary = "", description = "Deletes a dataset processing")
-    @ApiResponses(value = { @ApiResponse(responseCode = "204", description = "dataset processing deleted"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "404", description = "no dataset processing found"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @DeleteMapping(value = "/{datasetProcessingId}", produces = { "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT')")
-    ResponseEntity<Void> deleteDatasetProcessing(
-            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId)
+    @Operation(summary = "", description = "Deletes a dataset processing")
+    @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "dataset processing deleted"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "404", description = "no dataset processing found"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @DeleteMapping(value = "/{datasetProcessingId}", produces = {"application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT')")
+    ResponseEntity<Void> deleteDatasetProcessing(
+            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId)
             throws RestServiceException, ShanoirException, SolrServerException, IOException;
 
-    @Operation(summary = "", description = "If exists, returns the dataset processing corresponding to the given id")
-    @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "found dataset processing"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "404", description = "no dataset processing found"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @GetMapping(value = "/{datasetProcessingId}", produces = { "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
-    ResponseEntity<DatasetProcessingDTO> findDatasetProcessingById(
-            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
+    @Operation(summary = "", description = "If it exists, returns the dataset processing corresponding to the given id")
+    @ApiResponses(value = {@ApiResponse(responseCode = "200", description = "found dataset processing"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "404", description = "no dataset processing found"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @GetMapping(value = "/{datasetProcessingId}", produces = {"application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
+    ResponseEntity<DatasetProcessingDTO> findDatasetProcessingById(
+            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
 
-    @Operation(summary = "", description = "Returns the dataset processings with given study and subject")
-    @ApiResponses(value = {
-            @ApiResponse(responseCode = "200", description = "found dataset processings"),
-            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @GetMapping(value = "", produces = { "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
-    ResponseEntity<List<DatasetProcessingDTO>> findDatasetProcessings();
+    @Operation(summary = "", description = "Returns the dataset processings with given study and subject")
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200", description = "found dataset processings"),
+            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @GetMapping(value = "", produces = {"application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
+    ResponseEntity<List<DatasetProcessingDTO>> findDatasetProcessings();
 
-    @Operation(summary = "", description = "Returns the input datasets of a processing")
-    @ApiResponses(value = {
-            @ApiResponse(responseCode = "200", description = "found dataset processings"),
-            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @GetMapping(value = "/{datasetProcessingId}/inputDatasets/", produces = { "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
-    ResponseEntity<List<DatasetDTO>> getInputDatasets(@Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
+    @Operation(summary = "", description = "Returns the input datasets of a processing")
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200", description = "found dataset processings"),
+            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @GetMapping(value = "/{datasetProcessingId}/inputDatasets/", produces = {"application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
+    ResponseEntity<List<DatasetDTO>> getInputDatasets(@Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
 
-    @Operation(summary = "", description = "Returns the output datasets of a processing")
-    @ApiResponses(value = {
-            @ApiResponse(responseCode = "200", description = "found dataset processings"),
-            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @GetMapping(value = "/{datasetProcessingId}/outputDatasets/", produces = { "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
-    ResponseEntity<List<DatasetDTO>> getOutputDatasets(@Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
+    @Operation(summary = "", description = "Returns the output datasets of a processing")
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200", description = "found dataset processings"),
+            @ApiResponse(responseCode = "204", description = "no dataset processing found"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @GetMapping(value = "/{datasetProcessingId}/outputDatasets/", produces = {"application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
+    ResponseEntity<List<DatasetDTO>> getOutputDatasets(@Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId);
 
-    @Operation(summary = "", description = "Saves a new dataset processing")
-    @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "created dataset processing"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "422", description = "bad parameters"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @PostMapping(value = "", produces = { "application/json" }, consumes = {
-            "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
-    ResponseEntity<DatasetProcessingDTO> saveNewDatasetProcessing(@Parameter(description = "dataset processing to create", required = true) @Valid @RequestBody DatasetProcessing datasetProcessing,
-            BindingResult result) throws RestServiceException;
+    @Operation(summary = "", description = "Saves a new dataset processing")
+    @ApiResponses(value = {@ApiResponse(responseCode = "200", description = "created dataset processing"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "422", description = "bad parameters"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @PostMapping(value = "", produces = {"application/json"}, consumes = {
+            "application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER')")
+    ResponseEntity<DatasetProcessingDTO> saveNewDatasetProcessing(@Parameter(description = "dataset processing to create", required = true) @Valid @RequestBody DatasetProcessing datasetProcessing,
+            BindingResult result) throws RestServiceException;
 
-    @Operation(summary = "", description = "Updates a dataset processing")
-    @ApiResponses(value = { @ApiResponse(responseCode = "204", description = "dataset processing updated"),
-            @ApiResponse(responseCode = "401", description = "unauthorized"),
-            @ApiResponse(responseCode = "403", description = "forbidden"),
-            @ApiResponse(responseCode = "422", description = "bad parameters"),
-            @ApiResponse(responseCode = "500", description = "unexpected error") })
-    @PutMapping(value = "/{datasetProcessingId}", produces = { "application/json" }, consumes = {
-            "application/json" })
-    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER') and @controlerSecurityService.idMatches(#datasetProcessingId, #datasetProcessing)")
-    ResponseEntity<Void> updateDatasetProcessing(
-            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId,
-            @Parameter(description = "dataset processing to update", required = true) @Valid @RequestBody DatasetProcessing datasetProcessing, BindingResult result)
-            throws RestServiceException;
+    @Operation(summary = "", description = "Updates a dataset processing")
+    @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "dataset processing updated"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "422", description = "bad parameters"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @PutMapping(value = "/{datasetProcessingId}", produces = {"application/json"}, consumes = {
+            "application/json"})
+    @PreAuthorize("hasAnyRole('ADMIN', 'EXPERT', 'USER') and @controlerSecurityService.idMatches(#datasetProcessingId, #datasetProcessing)")
+    ResponseEntity<Void> updateDatasetProcessing(
+            @Parameter(description = "id of the dataset processing", required = true) @PathVariable("datasetProcessingId") Long datasetProcessingId,
+            @Parameter(description = "dataset processing to update", required = true) @Valid @RequestBody DatasetProcessing datasetProcessing, BindingResult result)
+            throws RestServiceException;
+
+    @Operation(summary = "massiveDownloadByProcessingIds", description = "If they exist, returns a zip file of the inputs/outputs per processing corresponding to the given processing IDs. The data is returned in the HTTP response body and must be written to a zip file. It is sorted into folders by examination and processing.")
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200", description = "zip file"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "404", description = "no dataset found"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @PostMapping(value = "/massiveDownloadByProcessingIds")
+    @PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnEveryDatasetOfProcessings(#processingIds, 'CAN_DOWNLOAD'))")
+    void massiveDownloadByProcessingIds(
+            @Parameter(description = "ids of the processings", required = true) @Valid
+            @RequestBody List<Long> processingIds,
+            @Parameter(description = "outputs to extract") @Valid
+            @RequestParam(value = "resultOnly", defaultValue = "false") boolean resultOnly, HttpServletResponse response) throws RestServiceException;
+
+    @Operation(summary = "massiveDownloadProcessingByExaminationIds", description = "If they exist, returns a zip file of the inputs/outputs per processing corresponding to the given examination IDs. The data is returned in the HTTP response body and must be written to a zip file. It is sorted into folders by examination and processing.")
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200", description = "zip file"),
+            @ApiResponse(responseCode = "401", description = "unauthorized"),
+            @ApiResponse(responseCode = "403", description = "forbidden"),
+            @ApiResponse(responseCode = "404", description = "no dataset found"),
+            @ApiResponse(responseCode = "500", description = "unexpected error")})
+    @PostMapping(value = "/massiveDownloadProcessingByExaminationIds")
+    @PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnExaminations(#examinationIds, 'CAN_DOWNLOAD'))")
+    void massiveDownloadProcessingByExaminationIds(
+            @Parameter(description = "ids of the examinations", required = true) @Valid
+            @RequestBody List<Long> examinationIds,
+            @Parameter(description = "comment of the desired processings") @Valid
+            @RequestParam(value = "processingComment", required = false) String processingComment,
+            @Parameter(description = "outputs to extract") @Valid
+            @RequestParam(value = "resultOnly", defaultValue = "false") boolean resultOnly, HttpServletResponse response) throws RestServiceException;
 }
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApiController.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApiController.java
index 5e8d05c40b..c158c51ed8 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApiController.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/controler/DatasetProcessingApiController.java
@@ -15,16 +15,23 @@ package org.shanoir.ng.processing.controler;
 
 import io.swagger.v3.oas.annotations.Parameter;
+import jakarta.servlet.http.HttpServletResponse;
 import jakarta.validation.Valid;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.shanoir.ng.dataset.dto.DatasetDTO;
 import org.shanoir.ng.dataset.dto.mapper.DatasetMapper;
 import org.shanoir.ng.dataset.model.Dataset;
 import org.shanoir.ng.dataset.service.DatasetService;
+import org.shanoir.ng.examination.model.Examination;
+import org.shanoir.ng.examination.service.ExaminationService;
 import org.shanoir.ng.processing.dto.DatasetProcessingDTO;
 import org.shanoir.ng.processing.dto.mapper.DatasetProcessingMapper;
 import org.shanoir.ng.processing.model.DatasetProcessing;
 import org.shanoir.ng.processing.service.DatasetProcessingService;
+import org.shanoir.ng.processing.service.ProcessingDownloaderServiceImpl;
 import org.shanoir.ng.shared.error.FieldErrorMap;
 import org.shanoir.ng.shared.exception.*;
 import org.shanoir.ng.utils.KeycloakUtil;
@@ -38,9 +45,12 @@
 import org.springframework.validation.BindingResult;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestParam;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
@@ -60,7 +70,14 @@ public class DatasetProcessingApiController implements DatasetProcessingApi {
 
     private DatasetProcessingService datasetProcessingService;
 
     @Autowired
-    private DatasetService datasetService;
+    private ProcessingDownloaderServiceImpl processingDownloaderService;
+
+    @Autowired
+    private ExaminationService examinationService;
 
     @Override
     public ResponseEntity<Void> deleteDatasetProcessing(
@@ -147,7 +164,7 @@ public ResponseEntity<Void> updateDatasetProcessing(
             return new ResponseEntity<>(HttpStatus.NOT_FOUND);
         }
     }
-    
+
     private void validate(BindingResult result) throws RestServiceException {
         final FieldErrorMap errors = new FieldErrorMap(result);
         if (!errors.isEmpty()) {
@@ -155,4 +172,60 @@ private void validate(BindingResult result) throws RestServiceException {
             throw new RestServiceException(error);
         }
     }
+
+    @Override
+    public void massiveDownloadByProcessingIds(
+            @Parameter(description = "ids of the processings", required = true) @Valid
+            @RequestBody List<Long> processingIds,
+            @Parameter(description = "outputs to extract") @Valid
+            @RequestParam(value = "resultOnly") boolean resultOnly,
+            HttpServletResponse response) throws RestServiceException {
+
+        List<DatasetProcessing> processingList = new ArrayList<>();
+        for (Long processingId : processingIds) {
+            DatasetProcessing processing = processingId != null
+                    ? datasetProcessingService.findById(processingId).orElse(null)
+                    : null;
+            if (processing == null) {
+                throw new RestServiceException(
+                        new ErrorModel(HttpStatus.FORBIDDEN.value(), processingId + " is not a valid processing id."));
+            }
+            processingList.add(processing);
+        }
+        processingDownloaderService.massiveDownload(processingList, resultOnly, "dcm", response, false, null);
+    }
+
+    @Override
+    public void massiveDownloadProcessingByExaminationIds(
+            @Parameter(description = "ids of the examinations", required = true) @Valid
+            @RequestBody List<Long> examinationIds,
+            @Parameter(description = "comment of the desired processings") @Valid
+            @RequestParam(value = "processingComment", required = false) String processingComment,
+            @Parameter(description = "outputs to extract") @Valid
+            @RequestParam(value = "resultOnly") boolean resultOnly,
+            HttpServletResponse response) throws RestServiceException {
+
+        List<Examination> examinationList = new ArrayList<>();
+        for (Long examinationId : examinationIds) {
+            Examination examination = examinationId != null ? examinationService.findById(examinationId) : null;
+            if (examination == null) {
+                throw new RestServiceException(
+                        new ErrorModel(HttpStatus.FORBIDDEN.value(), examinationId + " is not a valid examination id."));
+            }
+            examinationList.add(examination);
+        }
+        processingDownloaderService.massiveDownloadByExaminations(examinationList, processingComment, resultOnly, "dcm", response, false, null);
+    }
 }
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/repository/DatasetProcessingRepository.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/repository/DatasetProcessingRepository.java
index 549d8f31f0..e9beffc4ef 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/repository/DatasetProcessingRepository.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/repository/DatasetProcessingRepository.java
@@ -17,6 +17,7 @@ import java.util.List;
 import java.util.Optional;
 
 import org.shanoir.ng.processing.model.DatasetProcessing;
+import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.CrudRepository;
 
 /**
@@ -43,4 +44,17 @@ public interface DatasetProcessingRepository extends CrudRepository<DatasetProcessing, Long> {
     List<DatasetProcessing> findAllByInputDatasets_Id(Long datasetId);
 
     List<DatasetProcessing> findAllByParentId(Long id);
+
+    /**
+     * Find all processings that are linked to the given examinations.
+     *
+     * @param examinationIds the examination ids
+     * @return the ids of the matching processings
+     */
+    @Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
+            "INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
+            "INNER JOIN dataset as dataset ON dataset.id=input.dataset_id " +
+            "INNER JOIN dataset_acquisition as acquisition ON acquisition.id=dataset.dataset_acquisition_id " +
+            "WHERE acquisition.examination_id IN (:examinationIds)", nativeQuery = true)
+    List<Long> findAllIdsByExaminationIds(List<Long> examinationIds);
 }
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/DatasetProcessingServiceImpl.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/DatasetProcessingServiceImpl.java
index 7ddcbb78f1..052b1cef8d 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/DatasetProcessingServiceImpl.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/DatasetProcessingServiceImpl.java
@@ -84,6 +84,10 @@ public Optional<DatasetProcessing> findById(final Long id) {
     public List<DatasetProcessing> findAll() {
         return Utils.toList(repository.findAll());
     }
+
+    public List<DatasetProcessing> findAllById(List<Long> idList) {
+        return idList.stream().flatMap(id -> findById(id).stream()).toList();
+    }
 
     @Override
     public DatasetProcessing create(final DatasetProcessing entity) {
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/ProcessingDownloaderServiceImpl.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/ProcessingDownloaderServiceImpl.java
new file mode 100644
index 0000000000..f86de0f0ab
--- /dev/null
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/processing/service/ProcessingDownloaderServiceImpl.java
@@ -0,0 +1,152 @@
+package org.shanoir.ng.processing.service;
+
+import jakarta.servlet.http.HttpServletResponse;
+import org.apache.solr.common.util.Pair;
+import org.shanoir.ng.dataset.model.Dataset;
+import org.shanoir.ng.dataset.service.DatasetDownloaderServiceImpl;
+import org.shanoir.ng.datasetacquisition.model.DatasetAcquisition;
+import org.shanoir.ng.download.DatasetDownloadError;
+import org.shanoir.ng.examination.model.Examination;
+import org.shanoir.ng.processing.model.DatasetProcessing;
+import org.shanoir.ng.processing.repository.DatasetProcessingRepository;
+import org.shanoir.ng.shared.event.ShanoirEvent;
+import org.shanoir.ng.shared.event.ShanoirEventType;
+import org.shanoir.ng.shared.exception.ErrorModel;
+import org.shanoir.ng.shared.exception.RestServiceException;
+import org.shanoir.ng.utils.DatasetFileUtils;
+import org.shanoir.ng.utils.KeycloakUtil;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+@Service
+public class ProcessingDownloaderServiceImpl extends DatasetDownloaderServiceImpl {
+
+    /** Number of downloadable datasets. */
+    private static final int DATASET_LIMIT = 500;
+
+    @Autowired
+    private DatasetProcessingRepository datasetProcessingRepository;
+
+    @Autowired
+    private DatasetProcessingServiceImpl datasetProcessingService;
+
+    public void massiveDownload(List<DatasetProcessing> processingList, boolean resultOnly, String format, HttpServletResponse response, boolean withManifest, Long converterId) throws RestServiceException {
+        manageResultOnly(processingList, resultOnly);
+
+        response.setContentType("application/zip");
+        response.setHeader("Content-Disposition", "attachment;filename=Processings_" + LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss")) + ZIP);
+        Map<Long, DatasetDownloadError> downloadResults = new HashMap<>();
+        Map<Long, List<String>> filesByAcquisitionId = new HashMap<>();
+
+        try (ZipOutputStream zipOutputStream = new ZipOutputStream(response.getOutputStream())) {
+            manageProcessingsDownload(processingList, downloadResults, zipOutputStream, format, withManifest, filesByAcquisitionId, converterId);
+
+            String ids = Stream.concat(
+                            processingList.stream().flatMap(processing -> processing.getInputDatasets().stream()),
+                            processingList.stream().flatMap(processing -> processing.getOutputDatasets().stream()))
+                    .map(dataset -> dataset.getId().toString())
+                    .collect(Collectors.joining(","));
+            ShanoirEvent event = new ShanoirEvent(ShanoirEventType.DOWNLOAD_DATASET_EVENT, ids,
+                    KeycloakUtil.getTokenUserId(), ids + "." + format, ShanoirEvent.IN_PROGRESS);
+            event.setStatus(ShanoirEvent.SUCCESS);
+            eventService.publishEvent(event);
+        } catch (Exception e) {
+            response.setContentType(null);
+            LOG.error("Unexpected error while downloading dataset files.", e);
+            throw new RestServiceException(
+                    new ErrorModel(HttpStatus.UNPROCESSABLE_ENTITY.value(),
+                            "Unexpected error while downloading dataset files"));
+        }
+    }
+
+    private void manageProcessingsDownload(List<DatasetProcessing> processingList, Map<Long, DatasetDownloadError> downloadResults, ZipOutputStream zipOutputStream, String format, boolean withManifest, Map<Long, List<String>> filesByAcquisitionId, Long converterId) throws RestServiceException, IOException {
+        for (DatasetProcessing processing : processingList) {
+            String processingFilePath = getExecFilepath(processing.getId(), getExaminationData(processing.getInputDatasets()));
+            String subjectName = getProcessingSubject(processing);
+            for (Dataset dataset : processing.getInputDatasets()) {
+                manageDatasetDownload(dataset, downloadResults, zipOutputStream, subjectName, processingFilePath + "/" + shapeForPath(dataset.getName()), format, withManifest, filesByAcquisitionId, converterId);
+            }
+            for (Dataset dataset : processing.getOutputDatasets()) {
+                manageDatasetDownload(dataset, downloadResults, zipOutputStream, subjectName, processingFilePath + "/output", format, withManifest, filesByAcquisitionId, converterId);
+            }
+        }
+        if (!filesByAcquisitionId.isEmpty()) {
+            DatasetFileUtils.writeManifestForExport(zipOutputStream, filesByAcquisitionId);
+        }
+
+        // Write errors to the file
+        if (!downloadResults.isEmpty()) {
+            ZipEntry zipEntry = new ZipEntry(JSON_RESULT_FILENAME);
+            zipEntry.setTime(System.currentTimeMillis());
+            zipOutputStream.putNextEntry(zipEntry);
+            zipOutputStream.write(objectMapper.writeValueAsString(downloadResults).getBytes());
+            zipOutputStream.closeEntry();
+        }
+    }
+
+    public void massiveDownloadByExaminations(List<Examination> examinationList, String processingComment, boolean resultOnly, String format, HttpServletResponse response, boolean withManifest, Long converterId) throws RestServiceException {
+        List<Long> processingIdsList = datasetProcessingRepository.findAllIdsByExaminationIds(examinationList.stream().map(Examination::getId).toList());
+        List<DatasetProcessing> processingList = datasetProcessingService.findAllById(processingIdsList);
+        if (processingComment != null) {
+            processingList = processingList.stream().filter(processing -> Objects.equals(processing.getComment(), processingComment)).toList();
+        }
+        massiveDownload(processingList, resultOnly, format, response, withManifest, converterId);
+    }
+
+    private void manageResultOnly(List<DatasetProcessing> processingList, boolean resultOnly) {
+        if (resultOnly) {
+            processingList.forEach(processing -> {
+                processing.setOutputDatasets(processing.getOutputDatasets().stream()
+                        .filter(file -> Objects.equals(file.getName(), "result.yaml")).toList());
+                processing.setInputDatasets(new ArrayList<>());
+            });
+        }
+    }
+
+    private String getProcessingSubject(DatasetProcessing processing) {
+        for (Dataset dataset : processing.getInputDatasets()) {
+            Examination exam = Optional.ofNullable(dataset)
+                    .map(Dataset::getDatasetAcquisition)
+                    .map(DatasetAcquisition::getExamination)
+                    .orElse(null);
+            if (exam != null) {
+                return exam.getSubject().getName();
+            }
+        }
+        return "noSubject";
+    }
+
+    private Pair<Long, String> getExaminationData(List<Dataset> inputs) {
+        for (Dataset dataset : inputs) {
+            Examination exam = Optional.ofNullable(dataset)
+                    .map(Dataset::getDatasetAcquisition)
+                    .map(DatasetAcquisition::getExamination)
+                    .orElse(null);
+            if (exam != null) {
+                return new Pair<>(exam.getId(), exam.getComment());
+            }
+        }
+        return new Pair<>(0L, "");
+    }
+
+    private String getExecFilepath(Long processingId, Pair<Long, String> examData) {
+        String execFilePath = "processing_" + processingId + "_exam_" + examData.first();
+        if (!Objects.equals(examData.second(), "")) {
+            execFilePath += "_" + examData.second();
+        }
+        return shapeForPath(execFilePath);
+    }
+
+    private String shapeForPath(String path) {
+        path = path.replaceAll("[^a-zA-Z0-9_\\-]", "_");
+        if (path.length() > 255) {
+            path = path.substring(0, 254);
+        }
+        return path;
+    }
+}
diff --git a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/vip/controller/PathApiController.java b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/vip/controller/PathApiController.java
index 59dfb04c71..cda3882bd9 100644
--- a/shanoir-ng-datasets/src/main/java/org/shanoir/ng/vip/controller/PathApiController.java
+++ b/shanoir-ng-datasets/src/main/java/org/shanoir/ng/vip/controller/PathApiController.java
@@ -9,6 +9,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Controller;
@@ -23,6 +24,7 @@ public class PathApiController implements PathApi {
 
     private static final String DCM = "dcm";
 
+    @Qualifier("datasetDownloaderServiceImpl")
     @Autowired
     private DatasetDownloaderServiceImpl datasetDownloaderService;
 
diff --git a/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetApiControllerTest.java b/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetApiControllerTest.java
index 7e1f271a71..42d343d70e 100644
--- a/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetApiControllerTest.java
+++ b/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetApiControllerTest.java
@@ -56,6 +56,7 @@
 import org.shanoir.ng.utils.usermock.WithMockKeycloakUser;
 import org.springframework.amqp.rabbit.core.RabbitTemplate;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
 import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
@@ -154,6 +155,7 @@ public class DatasetApiControllerTest {
     private DicomSEGAndSRImporterService dicomSRImporterService;
 
     @MockBean
+    @Qualifier("datasetDownloaderServiceImpl")
     private DatasetDownloaderServiceImpl datasetDownloaderService;
 
     @MockBean
diff --git a/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetDownloaderServiceTest.java b/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetDownloaderServiceTest.java
index f3695a8c46..eca46410ad 100644
--- a/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetDownloaderServiceTest.java
+++ b/shanoir-ng-datasets/src/test/java/org/shanoir/ng/dataset/DatasetDownloaderServiceTest.java
@@ -40,6 +40,7 @@ import org.shanoir.ng.shared.security.ControlerSecurityService;
 import org.shanoir.ng.utils.usermock.WithMockKeycloakUser;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.mock.web.MockHttpServletResponse;
@@ -61,10 +62,11 @@
 @SpringBootTest
 @ActiveProfiles("test")
 public class DatasetDownloaderServiceTest {
-    
-    @Autowired
+
+    @Qualifier("datasetDownloaderServiceImpl")
+    @Autowired
     DatasetDownloaderServiceImpl datasetDownloaderService;
-    
+
     @MockBean
     private DatasetService datasetServiceMock;
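
Reviewer note: a minimal client sketch for exercising the new massive-download endpoint, useful when testing this patch by hand. The host, the "/datasets" route prefix, the bearer token, and the processing ids below are assumptions and depend on the deployment; only the "/datasetProcessing/massiveDownloadByProcessingIds" path, the JSON list body, and the "resultOnly" query parameter come from the diff above. It uses nothing beyond the JDK's built-in java.net.http client.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class MassiveDownloadClientSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical deployment values: adjust host, prefix and token source.
        String base = "https://shanoir.example.org/datasets";
        String token = System.getenv("SHANOIR_TOKEN"); // hypothetical Keycloak bearer token

        HttpClient client = HttpClient.newHttpClient();

        // POST the processing ids as a JSON array; resultOnly=false keeps inputs and outputs.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + "/datasetProcessing/massiveDownloadByProcessingIds?resultOnly=false"))
                .header("Authorization", "Bearer " + token)
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("[12, 34]")) // hypothetical processing ids
                .build();

        // The endpoint streams a zip in the response body; write it straight to disk.
        HttpResponse<Path> response = client.send(request,
                HttpResponse.BodyHandlers.ofFile(Path.of("processings.zip")));
        System.out.println("HTTP " + response.statusCode() + " -> " + response.body());
    }
}

Per getExecFilepath and manageProcessingsDownload above, the resulting archive should contain one processing_<processingId>_exam_<examinationId> folder per processing, with input datasets at the top level, outputs under output/, and any per-dataset failures collected in ERRORS.json.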