#2488 : API for in/out download from a list of processingIds #2523

Merged (10 commits) on Dec 5, 2024
@@ -60,6 +60,7 @@
import org.slf4j.LoggerFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.data.domain.Page;
@@ -139,8 +140,9 @@ public class DatasetApiController implements DatasetApi {
@Autowired
private SolrService solrService;

-	@Autowired
-	DatasetDownloaderServiceImpl datasetDownloaderService;
+	@Qualifier("datasetDownloaderServiceImpl")
+	@Autowired
+	protected DatasetDownloaderServiceImpl datasetDownloaderService;

@Autowired
private ObjectMapper objectMapper;
@@ -79,4 +79,9 @@ public interface DatasetRepository extends PagingAndSortingRepository<Dataset, Long> {
List<Dataset> deleteByDatasetProcessingId(Long id);

boolean existsByTagsContains(StudyTag tag);

	@Query(value = "SELECT DISTINCT ds.* FROM dataset as ds " +
			"LEFT JOIN input_of_dataset_processing as input ON ds.id = input.dataset_id " +
			"WHERE input.processing_id = :processingId OR ds.dataset_processing_id = :processingId", nativeQuery = true)
	List<Dataset> findDatasetsByProcessingId(@Param("processingId") Long processingId);
}
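The query gathers both sides of a processing in one round trip: the join on input_of_dataset_processing picks up the processing's input datasets, while the second WHERE condition (ds.dataset_processing_id) picks up its outputs. A minimal caller sketch, assuming the repository is injected the way the services in this PR do it (the enclosing helper name is hypothetical):

	@Autowired
	private DatasetRepository datasetRepository;

	// Hypothetical helper: every dataset that is an input or an output
	// of the given processing, fetched in a single query.
	public List<Dataset> findInputsAndOutputs(Long processingId) {
		return datasetRepository.findDatasetsByProcessingId(processingId);
	}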
@@ -1103,5 +1103,60 @@ public boolean hasRightOnExamination(String examinationUID, String rightStr) throws EntityNotFoundException {
Long id = studyInstanceUIDHandler.extractExaminationId(examinationUID);
return hasRightOnExamination(id, rightStr);
}


	public boolean hasRightOnEveryDatasetOfProcessings(List<Long> processingIds, String rightStr) {
		if (KeycloakUtil.getTokenRoles().contains(ROLE_ADMIN)) {
			return true;
		}
		boolean hasRight = true;
		for (Long processingId : processingIds) {
			if (processingId == null) {
				continue;
			}
			Iterable<Dataset> datasets = datasetRepository.findDatasetsByProcessingId(processingId);

for (Dataset dataset : datasets) {
if (dataset.getDatasetAcquisition() == null
|| dataset.getDatasetAcquisition().getExamination() == null
|| dataset.getDatasetAcquisition().getExamination().getStudyId() == null) {

if (dataset.getDatasetProcessing() != null && dataset.getDatasetProcessing().getInputDatasets() != null) {
for (Dataset inputDs : dataset.getDatasetProcessing().getInputDatasets()) {
hasRight &= hasRightOnTrustedDataset(inputDs, rightStr);
}
} else {
throw new IllegalStateException("Cannot check rights on dataset " + dataset.getId() + ": it has neither an examination nor a processing parent.");
}
} else {
hasRight &= this.hasRightOnStudyCenter(dataset.getDatasetAcquisition().getExamination().getCenterId(), dataset.getDatasetAcquisition().getExamination().getStudyId(), rightStr);
}
}
}
return hasRight;
}
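Two shapes of dataset reach this check: acquired datasets, whose rights follow from the examination's center/study pair, and derived datasets with no acquisition, whose rights are inherited from the input datasets of the processing that produced them. The per-dataset decision, isolated as a sketch (the helper name is hypothetical; the methods it calls exist in this service):

	private boolean hasRightOnSingleDataset(Dataset dataset, String rightStr) {
		Examination exam = dataset.getDatasetAcquisition() != null
				? dataset.getDatasetAcquisition().getExamination() : null;
		if (exam != null && exam.getStudyId() != null) {
			// Acquired dataset: rights come from the study/center pair.
			return hasRightOnStudyCenter(exam.getCenterId(), exam.getStudyId(), rightStr);
		}
		if (dataset.getDatasetProcessing() != null && dataset.getDatasetProcessing().getInputDatasets() != null) {
			// Derived dataset: rights are inherited from every input of its processing.
			return dataset.getDatasetProcessing().getInputDatasets().stream()
					.allMatch(input -> hasRightOnTrustedDataset(input, rightStr));
		}
		throw new IllegalStateException("Cannot check rights on dataset " + dataset.getId()
				+ ": it has neither an examination nor a processing parent.");
	}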

/**
* Check that the connected user has the given right on each of the given examinations.
*
* @param examinationIds the examination ids
* @param rightStr the right
* @return true or false
* @throws EntityNotFoundException
*/
public boolean hasRightOnExaminations(List<Long> examinationIds, String rightStr) throws EntityNotFoundException {
if (KeycloakUtil.getTokenRoles().contains(ROLE_ADMIN)) {
return true;
}
for (Long examinationId : examinationIds) {
Examination exam = examinationRepository.findById(examinationId).orElse(null);
if (exam == null) {
throw new EntityNotFoundException("Cannot find examination with id " + examinationId);
}
if (exam.getStudyId() == null) {
return false;
}
if (!this.hasRightOnStudyCenter(exam.getCenterId(), exam.getStudyId(), rightStr)) {
return false;
}
}
return true;
}
}
@@ -60,51 +60,49 @@
@Service
public class DatasetDownloaderServiceImpl {

-	private static final String FAILURES_TXT = "failures.txt";
+	protected static final String FAILURES_TXT = "failures.txt";

-	private static final String NII = "nii";
+	protected static final String NII = "nii";

-	private static final String DCM = "dcm";
+	protected static final String DCM = "dcm";

-	private static final String ZIP = ".zip";
+	protected static final String ZIP = ".zip";

-	private static final Logger LOG = LoggerFactory.getLogger(DatasetDownloaderServiceImpl.class);
+	protected static final Logger LOG = LoggerFactory.getLogger(DatasetDownloaderServiceImpl.class);

-	private static final String JSON_RESULT_FILENAME = "ERRORS.json";
+	protected static final String JSON_RESULT_FILENAME = "ERRORS.json";

-	private static final Long DEFAULT_NIFTI_CONVERTER_ID = 6L;
+	protected static final Long DEFAULT_NIFTI_CONVERTER_ID = 6L;

-	public static final String GZIP_EXTENSION = ".gz";
+	protected static final String GZIP_EXTENSION = ".gz";

-	public static final String NII_GZ = ".nii.gz";
-	public static final String CONVERSION_FAILED_ERROR_MSG = "Nifti conversion failed, you may try to select another one.";
+	protected static final String NII_GZ = ".nii.gz";

+	protected static final String CONVERSION_FAILED_ERROR_MSG = "Nifti conversion failed, you may try to select another converter.";

@Autowired
DatasetService datasetService;

	@Autowired
-	private WADODownloaderService downloader;
+	protected WADODownloaderService downloader;

	@Autowired
-	private SubjectRepository subjectRepository;
+	protected SubjectRepository subjectRepository;

	@Autowired
-	private StudyRepository studyRepository;
+	protected StudyRepository studyRepository;

	@Autowired
-	private RabbitTemplate rabbitTemplate;
+	protected RabbitTemplate rabbitTemplate;

	@Autowired
-	ShanoirEventService eventService;
-
-	SimpleDateFormat fileDateformatter = new SimpleDateFormat("yyyyMMddHHmmss");
+	protected ShanoirEventService eventService;

	@Autowired
-	private ObjectMapper objectMapper;
+	protected ObjectMapper objectMapper;

	@PostConstruct
-	private void initialize() {
+	protected void initialize() {
		// Set the reply timeout to 5 min (nifti reconversion can take some time)
this.rabbitTemplate.setReplyTimeout(300000);
}
@@ -115,7 +113,6 @@ public void massiveDownload(String format, List<Dataset> datasets, HttpServletRe
response.setContentType("application/zip");
response.setHeader("Content-Disposition",
"attachment;filename=" + getFileName(datasets));
-		SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmssSSS");
Map<Long, DatasetDownloadError> downloadResults = new HashMap<Long, DatasetDownloadError>();

try (ZipOutputStream zipOutputStream = new ZipOutputStream(response.getOutputStream())) {
@@ -165,15 +162,15 @@ public void massiveDownload(String format, List<Dataset> datasets, HttpServletRe
} else if (NII.equals(format)) {
// Check if we have a specific converter -> nifti reconversion
if (converterId != null) {
-						reconvertToNifti(format, response, converterId, dataset, formatter, pathURLs, downloadResult, subjectName, zipOutputStream);
+						reconvertToNifti(format, converterId, dataset, pathURLs, downloadResult, subjectName, zipOutputStream);
} else {
// Check that we have existing nifti, otherwise reconvert using dcm2niix by default.
DatasetFileUtils.getDatasetFilePathURLs(dataset, pathURLs, DatasetExpressionFormat.NIFTI_SINGLE_FILE, downloadResult);
if (!pathURLs.isEmpty()) {
List<String> files = DatasetFileUtils.copyNiftiFilesForURLs(pathURLs, zipOutputStream, dataset, subjectName, false, datasetFilePath);
} else {
// Reconvert using dcm2niix by default.
-							reconvertToNifti(format, response, DEFAULT_NIFTI_CONVERTER_ID, dataset, formatter, pathURLs, downloadResult, subjectName, zipOutputStream);
+							reconvertToNifti(format, DEFAULT_NIFTI_CONVERTER_ID, dataset, pathURLs, downloadResult, subjectName, zipOutputStream);
}
}
} else {
@@ -211,7 +208,7 @@ public void massiveDownload(String format, List<Dataset> datasets, HttpServletRe
}
}

-	private void reconvertToNifti(String format, HttpServletResponse response, Long converterId, Dataset dataset, SimpleDateFormat formatter, List<URL> pathURLs, DatasetDownloadError downloadResult, String subjectName, ZipOutputStream zipOutputStream) throws RestServiceException, IOException {
+	protected void reconvertToNifti(String format, Long converterId, Dataset dataset, List<URL> pathURLs, DatasetDownloadError downloadResult, String subjectName, ZipOutputStream zipOutputStream) throws RestServiceException, IOException {
File userDir = DatasetFileUtils.getUserImportDir("/tmp");
String tmpFilePath = userDir + File.separator + dataset.getId() + "_" + format;

@@ -272,7 +269,7 @@ private void reconvertToNifti(
}
}

-	private String getSubjectName(Dataset dataset) {
+	protected String getSubjectName(Dataset dataset) {
String subjectName = "unknownSubject";
if(dataset.getSubjectId() != null){
Optional<Subject> subjectOpt = subjectRepository.findById(dataset.getSubjectId());
@@ -283,7 +280,8 @@ private String getSubjectName(Dataset dataset) {
return subjectName;
}

-	private String getFileName(List<Dataset> datasets) {
+	protected String getFileName(List<Dataset> datasets) {
+		SimpleDateFormat fileDateformatter = new SimpleDateFormat("yyyyMMddHHmmss");
if (datasets != null && datasets.size() == 1) {
String datasetName = getDatasetFileName(datasets.get(0));
return "Dataset_" + datasetName + "_" + fileDateformatter.format(new DateTime().toDate()) + ZIP;
@@ -292,7 +290,7 @@
}
}
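Moving fileDateformatter from a shared field into getFileName is more than cosmetic: SimpleDateFormat is not thread-safe, so one instance shared across concurrent download requests can produce corrupted timestamps. A thread-safe alternative, sketched here outside this PR, keeps a single java.time formatter instead:

	// DateTimeFormatter is immutable and thread-safe, so a shared constant is fine.
	private static final DateTimeFormatter FILE_DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");

	protected String fileTimestamp() {
		return LocalDateTime.now().format(FILE_DATE_FORMATTER);
	}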

-	private String getDatasetFileName(Dataset dataset) {
+	protected String getDatasetFileName(Dataset dataset) {
// Only one dataset -> the logic for one dataset is used
String subjectName = getSubjectName(dataset);

@@ -305,7 +303,7 @@
return datasetName;
}

-	private String getDatasetFilepath(Dataset dataset, String studyName, String subjectName) {
+	protected String getDatasetFilepath(Dataset dataset, String studyName, String subjectName) {
Examination exam = datasetService.getExamination(dataset);

String datasetFilePath = studyName + "_" + subjectName + "_Exam-" + exam.getId();
@@ -19,11 +19,13 @@
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
+import jakarta.servlet.http.HttpServletResponse;
import jakarta.validation.Valid;
import org.apache.solr.client.solrj.SolrServerException;
import org.shanoir.ng.dataset.dto.DatasetDTO;
import org.shanoir.ng.processing.dto.DatasetProcessingDTO;
import org.shanoir.ng.processing.model.DatasetProcessing;
+import org.shanoir.ng.shared.exception.EntityNotFoundException;
import org.shanoir.ng.shared.exception.RestServiceException;
import org.shanoir.ng.shared.exception.ShanoirException;
import org.springframework.http.ResponseEntity;
@@ -120,4 +122,34 @@ ResponseEntity<Void> updateDatasetProcessing(
@Parameter(description = "dataset processing to update", required = true) @Valid @RequestBody DatasetProcessing datasetProcessing, BindingResult result)
throws RestServiceException;

@Operation(summary = "massiveDownloadDatasetsByProcessingIds", description = "If exists, returns a zip file of the inputs/outputs per processing corresponding to the given processing IDs")
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "zip file"),
@ApiResponse(responseCode = "401", description = "unauthorized"),
@ApiResponse(responseCode = "403", description = "forbidden"),
@ApiResponse(responseCode = "404", description = "no dataset found"),
@ApiResponse(responseCode = "500", description = "unexpected error") })
@GetMapping(value = "/massiveDownloadByProcessing")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnEveryDatasetOfProcessings(#processingIds, 'CAN_DOWNLOAD'))")
void massiveDownloadByProcessingId(
@Parameter(description = "id of the processing", required=true) @Valid
@RequestParam(value = "processingIds") List<Long> processingIds,
@Parameter(description = "outputs to extract") @Valid
@RequestParam(value = "resultOnly") boolean resultOnly, HttpServletResponse response) throws RestServiceException;

@Operation(summary = "massiveDownloadProcessingDatasetsByExaminationIds", description = "If exists, returns a zip file of the inputs/outputs per processing corresponding to the given examination IDs")
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "zip file"),
@ApiResponse(responseCode = "401", description = "unauthorized"),
@ApiResponse(responseCode = "403", description = "forbidden"),
@ApiResponse(responseCode = "404", description = "no dataset found"),
@ApiResponse(responseCode = "500", description = "unexpected error") })
@GetMapping(value = "/massiveDownloadProcessingByExamination")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnExaminations(#examinationIds, 'CAN_DOWNLOAD'))")
void massiveDownloadProcessingByExaminationId(
@Parameter(description = "id of the examination", required=true) @Valid
@RequestParam(value = "examiantionIds") List<Long> examinationIds,
@Parameter(description = "outputs to extract") @Valid
@RequestParam(value = "resultOnly") boolean resultOnly, HttpServletResponse response) throws RestServiceException;

}