Merge pull request #1895 from michaelkain/auto-test-import
ShanoirUploader: auto test import + perf optimization QueryPACSService
jcomedouteau authored Oct 23, 2023
2 parents 8abdf01 + 1a87c3d commit 6791ed0
Showing 40 changed files with 515 additions and 315 deletions.
.gitignore (4 changes: 1 addition & 3 deletions)
@@ -26,9 +26,7 @@ docker-compose/boutiques/boutiques
 shanoir-ng-preclinical/null*
 shanoir-uploader/src/main/resources/key
 shanoir-uploader/src/main/resources/profile.OFSEP/key
-shanoir-uploader/src/main/resources/profile.OFSEP-qualif/key
-shanoir-uploader/src/main/resources/profile.OFSEP-NG-qualif/key
-shanoir-uploader/src/main/resources/profile.OFSEP-NG/key
+shanoir-uploader/src/main/resources/profile.OFSEP-Qualif/key
 shanoir-uploader/src/main/resources/pseudonymus
 shanoir-uploader/src/test/resources/acr_phantom_t1/
 shanoir-ng-tests/tests/geckodriver.log
Binary file modified shanoir-ng-anonymization/src/main/resources/anonymization.xlsx
shanoir-ng-back/pom.xml (2 changes: 1 addition & 1 deletion)
@@ -49,7 +49,7 @@ along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.html
     <java.version>17</java.version>
     <mapstruct.version>1.5.3.Final</mapstruct.version>
     <keycloak.version>22.0.1</keycloak.version>
-    <dcm4che.version>5.30.0</dcm4che.version>
+    <dcm4che.version>5.31.0</dcm4che.version>
 </properties>

 <dependencyManagement>
@@ -41,7 +41,10 @@ public enum ContrastAgentUsed {
     GADOVIST(7),

     // Clariscan
-    CLARISCAN(8);
+    CLARISCAN(8),
+
+    // Dotarem
+    DOTAREM(9);

     private int id;
@@ -245,10 +245,11 @@ public void setSequenceName(String sequenceName) {

     public DatasetFile getFirstDatasetFileForCurrentSerie() {
         if (getDatasets() == null
-            || getDatasets().get(0) == null
-            || getDatasets().get(0).getExpressionFormats() == null
-            || getDatasets().get(0).getExpressionFormats().get(0) == null
-            || getDatasets().get(0).getExpressionFormats().get(0).getDatasetFiles() == null) {
+                || getDatasets().get(0) == null
+                || getDatasets().get(0).getExpressionFormats() == null
+                || getDatasets().get(0).getExpressionFormats().get(0) == null
+                || getDatasets().get(0).getExpressionFormats().get(0).getDatasetFiles() == null
+                || getDatasets().get(0).getExpressionFormats().get(0).getDatasetFiles().get(0) == null) {
             return null;
         }
         return getDatasets().get(0).getExpressionFormats().get(0).getDatasetFiles().get(0);
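The hunk above extends the defensive null chain by one more level, so the first DatasetFile is itself checked before being returned. For illustration, a minimal Optional-based sketch of the same guard (hypothetical, not part of this commit; the Dataset and ExpressionFormat type names are assumed from the getter chain in the diff):

    import java.util.Optional;

    public DatasetFile getFirstDatasetFileForCurrentSerie() {
        // Each map() collapses to an empty Optional when a link is null,
        // mirroring the explicit null checks in the committed code. The
        // isEmpty() guards additionally cover empty lists, which the
        // committed chain does not (get(0) throws on an empty list).
        return Optional.ofNullable(getDatasets())
                .map(l -> l.isEmpty() ? null : l.get(0))
                .map(Dataset::getExpressionFormats)
                .map(l -> l.isEmpty() ? null : l.get(0))
                .map(ExpressionFormat::getDatasetFiles)
                .map(l -> l.isEmpty() ? null : l.get(0))
                .orElse(null);
    }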
@@ -221,7 +221,7 @@ private Set<DatasetAcquisition> generateAcquisitions(Examination examination, Im
         try {
             dicomAttributes = dicomProcessing.getDicomObjectAttributes(serie.getFirstDatasetFileForCurrentSerie(), serie.getIsEnhanced());
         } catch (IOException e) {
-            throw new ShanoirException("Unable to retrieve dicom attributes in file " + serie.getFirstDatasetFileForCurrentSerie().getPath(), e);
+            throw new ShanoirException("Unable to retrieve dicom attributes in serie: " + serie.getSeriesDescription(), e);
         }

         // Generate acquisition object with all sub objects : datasets, protocols, expressions, ...
@@ -14,7 +14,7 @@


 <form [formGroup]="form" novalidate>
-    <div class="header command-zone">1. Query Neurinfo PACS</div>
+    <div class="header command-zone">1. Query PACS</div>
     <fieldset class="step">
         <ol>
             <legend>
@@ -69,4 +69,4 @@
         </ol>
     </fieldset>
     <button class="next" [disabled]="!form.valid" (click)="queryPACS()">Query</button>
-</form>
\ No newline at end of file
+</form>
@@ -29,7 +29,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
 import java.util.regex.Matcher;
@@ -75,8 +74,6 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.core.io.ByteArrayResource;
-import org.springframework.http.HttpEntity;
-import org.springframework.http.HttpMethod;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.HttpStatusCode;
 import org.springframework.http.MediaType;
@@ -87,7 +84,6 @@
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.client.RestTemplate;
 import org.springframework.web.multipart.MultipartFile;

 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -145,9 +141,6 @@ public class ImporterApiController implements ImporterApi {
     @Value("${shanoir.import.directory}")
     private String importDir;

-    @Autowired
-    private RestTemplate restTemplate;
-
     @Autowired
     private DicomDirGeneratorService dicomDirGeneratorService;

@@ -268,7 +261,6 @@ public ResponseEntity<Void> startImportJob(
         final File importJobDir = new File(userImportDir, tempDirId);
         if (importJobDir.exists()) {
             importJob.setWorkFolder(importJobDir.getAbsolutePath());
-            cleanSeries(importJob);
             LOG.info("Starting import job for user {} (userId: {}) with import job folder: {}", KeycloakUtil.getTokenUserName(), userId, importJob.getWorkFolder());
             importerManagerService.manageImportJob(importJob);
             return new ResponseEntity<>(HttpStatus.OK);
@@ -279,24 +271,6 @@
         }
     }

-    private void cleanSeries(final ImportJob importJob) {
-        for (Iterator<Patient> patientIt = importJob.getPatients().iterator(); patientIt.hasNext();) {
-            Patient patient = patientIt.next();
-            List<Study> studies = patient.getStudies();
-            for (Iterator<Study> studyIt = studies.iterator(); studyIt.hasNext();) {
-                Study study = studyIt.next();
-                List<Serie> series = study.getSeries();
-                for (Iterator<Serie> serieIt = series.iterator(); serieIt.hasNext();) {
-                    Serie serie = serieIt.next();
-                    if (serie.isIgnored() || serie.isErroneous() || !serie.getSelected()) {
-                        LOG.info("Serie {} cleaned from import (ignored, erroneous, not selected).", serie.getSeriesDescription());
-                        serieIt.remove();
-                    }
-                }
-            }
-        }
-    }
-
     @Override
     public ResponseEntity<ImportJob> queryPACS(
             @Parameter(name = "DicomQuery", required = true) @Valid @RequestBody final DicomQuery dicomQuery)
@@ -309,7 +283,6 @@ public ResponseEntity<ImportJob> queryPACS(
             importJob.setWorkFolder("");
             importJob.setFromPacs(true);
             importJob.setUserId(KeycloakUtil.getTokenUserId());
-
         } catch (ShanoirException e) {
             throw new RestServiceException(
                     new ErrorModel(HttpStatus.UNPROCESSABLE_ENTITY.value(), e.getMessage(), null));
@@ -332,12 +305,11 @@ public ResponseEntity<ImportJob> importDicomZipFile(
         MockMultipartFile multiPartFile;
         try {
             multiPartFile = new MockMultipartFile(tempFile.getName(), tempFile.getName(), APPLICATION_ZIP, new FileInputStream(tempFile.getAbsolutePath()));

             // Import dicomfile
             return uploadDicomZipFile(multiPartFile);
         } catch (IOException e) {
-            LOG.error("ERROR while loading zip fiole, please contact an administrator");
-            e.printStackTrace();
+            LOG.error(e.getMessage(), e);
             return new ResponseEntity<>(HttpStatus.NOT_ACCEPTABLE);
         } finally {
             // Delete temp file which is useless now
@@ -374,12 +346,9 @@ public ResponseEntity<EegImportJob> uploadEEGZipFile(
         if (!userImportDir.exists()) {
             userImportDir.mkdirs(); // create if not yet existing
         }
-
         // Unzip the file and get the elements
         File tempFile = ImportUtils.saveTempFile(userImportDir, eegFile);
-
         File importJobDir = ImportUtils.saveTempFileCreateFolderAndUnzip(tempFile, eegFile, false);
-
         EegImportJob importJob = new EegImportJob();
         importJob.setUserId(userId);
         importJob.setArchive(eegFile.getOriginalFilename());
@@ -142,35 +142,18 @@ public void manageImportJob(final ImportJob importJob) {
             } else {
                 throw new ShanoirException("Unsupported type of import.");
             }
+            // we do the clean series here: at this point we are sure for all imports, that the ImagesCreatorAndDicomFileAnalyzer
+            // has been run and correctly classified everything. So no need to check afterwards for erroneous series.
+            cleanSeries(importJob);

             event.setProgress(0.25F);
             eventService.publishEvent(event);

             for (Iterator<Patient> patientsIt = patients.iterator(); patientsIt.hasNext();) {
                 Patient patient = patientsIt.next();
-                // perform anonymization only in case of profile explicitly set
-                if (importJob.getAnonymisationProfileToUse() == null || !importJob.getAnonymisationProfileToUse().isEmpty()) {
-                    String anonymizationProfile = (String) this.rabbitTemplate.convertSendAndReceive(RabbitMQConfiguration.STUDY_ANONYMISATION_PROFILE_QUEUE, importJob.getStudyId());
-                    importJob.setAnonymisationProfileToUse(anonymizationProfile);
-                }
-                ArrayList<File> dicomFiles = getDicomFilesForPatient(importJob, patient, importJobDir.getAbsolutePath());
-                final Subject subject = patient.getSubject();
-
-                if (subject == null) {
-                    LOG.error("Error: subject == null in importJob.");
-                    throw new ShanoirException("Error: subject == null in importJob.");
-                }
-
-                final String subjectName = subject.getName();
-
-                event.setMessage("Pseudonymizing DICOM files for subject [" + subjectName + "]...");
-                eventService.publishEvent(event);
-
-                try {
-                    ANONYMIZER.anonymizeForShanoir(dicomFiles, importJob.getAnonymisationProfileToUse(), subjectName, subjectName);
-                } catch (Exception e) {
-                    LOG.error(e.getMessage(), e);
-                    throw new ShanoirException("Error during pseudonymization.");
-                }
+                // DICOM files coming from ShUp are already pseudonymized
+                if (!importJob.isFromShanoirUploader()) {
+                    pseudonymize(importJob, event, importJobDir, patient);
+                }
                 Long converterId = importJob.getConverterId();
                 datasetsCreatorAndNIfTIConverter.createDatasetsAndRunConversion(patient, importJobDir, converterId, importJob);
@@ -188,16 +171,62 @@ public void manageImportJob(final ImportJob importJob) {
             }
         }

+    /**
+     * cleanSeries is important here for import-from-zip file: when the ImagesCreatorAndDicomFileAnalyzer
+     * has declared some series as e.g. erroneous, we have to remove them from the import. For import-from
+     * pacs or from-sh-up it is different, as the ImagesCreatorAndDicomFileAnalyzer is called afterwards.
+     * Same here for multi-exam-imports: it calls uploadDicomZipFile method, where series could be classed
+     * as erroneous and when startImportJob is called, we want them to be removed from the import.
+     *
+     * @param importJob
+     */
+    private void cleanSeries(final ImportJob importJob) {
+        for (Iterator<Patient> patientIt = importJob.getPatients().iterator(); patientIt.hasNext();) {
+            Patient patient = patientIt.next();
+            List<Study> studies = patient.getStudies();
+            for (Iterator<Study> studyIt = studies.iterator(); studyIt.hasNext();) {
+                Study study = studyIt.next();
+                List<Serie> series = study.getSeries();
+                for (Iterator<Serie> serieIt = series.iterator(); serieIt.hasNext();) {
+                    Serie serie = serieIt.next();
+                    if (serie.isIgnored() || serie.isErroneous() || !serie.getSelected()) {
+                        LOG.info("Serie {} cleaned from import (ignored, erroneous, not selected).", serie.getSeriesDescription());
+                        serieIt.remove();
+                    }
+                }
+            }
+        }
+    }
+
+    private void pseudonymize(final ImportJob importJob, ShanoirEvent event, final File importJobDir, Patient patient)
+            throws FileNotFoundException, ShanoirException {
+        if (importJob.getAnonymisationProfileToUse() == null || !importJob.getAnonymisationProfileToUse().isEmpty()) {
+            ArrayList<File> dicomFiles = getDicomFilesForPatient(importJob, patient, importJobDir.getAbsolutePath());
+            final Subject subject = patient.getSubject();
+            if (subject == null) {
+                LOG.error("Error: subject == null in importJob.");
+                throw new ShanoirException("Error: subject == null in importJob.");
+            }
+            final String subjectName = subject.getName();
+            event.setMessage("Pseudonymizing DICOM files for subject [" + subjectName + "]...");
+            eventService.publishEvent(event);
+            try {
+                ANONYMIZER.anonymizeForShanoir(dicomFiles, importJob.getAnonymisationProfileToUse(), subjectName, subjectName);
+            } catch (Exception e) {
+                LOG.error(e.getMessage(), e);
+                throw new ShanoirException("Error during pseudonymization.");
+            }
+        }
+    }
+
     private void sendFailureMail(ImportJob importJob, String errorMessage) {
         EmailDatasetImportFailed generatedMail = new EmailDatasetImportFailed();
         generatedMail.setExaminationId(importJob.getExaminationId().toString());
         generatedMail.setStudyId(importJob.getStudyId().toString());
         generatedMail.setSubjectName(importJob.getSubjectName());
         generatedMail.setStudyName(importJob.getStudyName());
         generatedMail.setUserId(importJob.getUserId());
-
         generatedMail.setErrorMessage(errorMessage != null ? errorMessage : "An unexpected error occured, please contact Shanoir support.");
-
         sendMail(importJob, generatedMail);
     }
@@ -208,7 +237,6 @@ private void sendFailureMail(ImportJob importJob, String errorMessage) {
      */
     private void sendMail(ImportJob job, EmailBase email) {
         List<Long> recipients = new ArrayList<>();
-
         // Get all recpients
         List<StudyUser> users = (List<StudyUser>) studyUserRightRepo.findByStudyId(job.getStudyId());
         for (StudyUser user : users) {
@@ -221,7 +249,6 @@
             return;
         }
         email.setRecipients(recipients);
-
         try {
             rabbitTemplate.convertAndSend(RabbitMQConfiguration.IMPORT_DATASET_FAILED_MAIL_QUEUE, objectMapper.writeValueAsString(email));
         } catch (AmqpException | JsonProcessingException e) {
@@ -310,8 +337,8 @@ private void downloadAndMoveDicomFilesToImportJobDir(final File importJobDir, Li

     /**
      * Using Java HashSet here to avoid duplicate files for Pseudonymization.
-     * For performance reasons already init with 5000 buckets, assuming,
-     * that we will normally never have more than 5000 files to process.
+     * For performance reasons already init with 10000 buckets, assuming,
+     * that we will normally never have more than 10000 files to process.
      * Maybe to be evaluated later with more bigger imports.
      *
      * @param importJob
@@ -321,7 +348,7 @@
      * @throws FileNotFoundException
      */
    private ArrayList<File> getDicomFilesForPatient(final ImportJob importJob, final Patient patient, final String workFolderPath) throws FileNotFoundException {
-        Set<File> pathsSet = new HashSet<>(5000);
+        Set<File> pathsSet = new HashSet<>(10000);
         List<Study> studies = patient.getStudies();
         for (Iterator<Study> studiesIt = studies.iterator(); studiesIt.hasNext();) {
             Study study = studiesIt.next();
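On the bucket-count change above: java.util.HashSet delays rehashing until its size exceeds capacity times the load factor (0.75 by default), so a set created with new HashSet<>(10000) actually resizes once roughly 7500 entries are inserted. An illustrative sizing helper (not part of this commit) that leaves room for n files without any rehash:

    import java.util.HashSet;
    import java.util.Set;

    final class Sets {
        // new HashSet<>(n) rehashes after about n * 0.75 insertions;
        // dividing by the default load factor up front guarantees
        // capacity for n elements without a resize.
        static <T> Set<T> withExpectedSize(int n) {
            return new HashSet<>((int) Math.ceil(n / 0.75));
        }
    }

For 10000 files this requests about 13334 buckets, which HashMap rounds up to the next power of two internally.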
(Remaining changed files of the 40 total are not shown.)