Merge branch 'develop' into dependabot/maven/shanoir-ng-keycloak-auth/org.keycloak-keycloak-core-26.0.6
pierrehenri-dauvergne authored Dec 20, 2024
2 parents 7ebe4e5 + fce5bbf commit f34647d
Showing 308 changed files with 9,102 additions and 11,314 deletions.
4 changes: 2 additions & 2 deletions docker-compose/Dockerfile
@@ -342,11 +342,11 @@ CMD ["nginx", "-g", "daemon off;"]

################ front-dev #################################################

-FROM node:lts-alpine3.17 as front-dev
+FROM node:lts-alpine3.21 as front-dev

WORKDIR /app

-RUN npm install -g @angular/cli@11.2.14
+RUN npm install -g @angular/cli@19.0.1
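These two bumps travel together: Angular CLI 19 requires Node 18.19+, 20.11+, or 22, which the lts-alpine3.21 image provides (Node 22 was the active LTS at the time of this commit), so upgrading one without the other would presumably leave the front-dev stage broken.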



@@ -0,0 +1 @@
+ALTER TABLE shanoir_metadata ADD COLUMN sorting_index int(11);
1 change: 1 addition & 0 deletions docker-compose/solr/core/schema.xml
@@ -461,6 +461,7 @@
<field name="studyName" type="lowercase"/>
<field name="subjectName" type="lowercase"/>
<field name="subjectId" type="plong"/>
<field name="sortingIndex" type="pint"/>
<field name="subjectType" type="lowercase"/>
<field name="tags" type="strings"/>
<field name="processed" type="boolean" indexed="true" stored="true" required="true" />
@@ -66,14 +66,15 @@ public void anonymize(ArrayList<File> dicomFiles, String profile) throws Excepti
tagsToDeleteForManufacturer = AnonymizationRulesSingleton.getInstance().getTagsToDeleteForManufacturer();
// init here for multi-threading reasons
Map<String, String> seriesInstanceUIDs = new HashMap<>();
+Map<String, String> frameOfReferenceUIDs = new HashMap<>();
Map<String, String> studyInstanceUIDs = new HashMap<>();
Map<String, String> studyIds = new HashMap<>();
LOG.debug("anonymize : totalAmount={}", totalAmount);
int current = 0;
for (int i = 0; i < dicomFiles.size(); ++i) {
final File file = dicomFiles.get(i);
// Perform the anonymization
-performAnonymization(file, anonymizationMap, false, "", "", seriesInstanceUIDs, studyInstanceUIDs, studyIds);
+performAnonymization(file, anonymizationMap, false, "", "", seriesInstanceUIDs, frameOfReferenceUIDs, studyInstanceUIDs, studyIds);
current++;
final int currentPercent = current * 100 / totalAmount;
LOG.debug("anonymize : anonymization current percent= {} %", currentPercent);
@@ -99,14 +100,15 @@ public void anonymizeForShanoir(ArrayList<File> dicomFiles, String profile, Stri

// init here for multi-threading reasons
Map<String, String> seriesInstanceUIDs = new HashMap<>();
+Map<String, String> frameOfReferenceUIDs = new HashMap<>();
Map<String, String> studyInstanceUIDs = new HashMap<>();
Map<String, String> studyIds = new HashMap<>();
LOG.debug("anonymize : totalAmount={}", totalAmount);
int current = 0;
for (int i = 0; i < dicomFiles.size(); ++i) {
final File file = dicomFiles.get(i);
// Perform the anonymization
-performAnonymization(file, anonymizationMap, true, patientName, patientID, seriesInstanceUIDs, studyInstanceUIDs, studyIds);
+performAnonymization(file, anonymizationMap, true, patientName, patientID, seriesInstanceUIDs, frameOfReferenceUIDs, studyInstanceUIDs, studyIds);
current++;
final int currentPercent = current * 100 / totalAmount;
LOG.debug("anonymize : anonymization current percent= {} %", currentPercent);
@@ -156,7 +158,7 @@ private void anonymizePatientMetaData(Attributes attributes, String patientName,
* @throws Exception
*/
public void performAnonymization(final File dicomFile, Map<String, String> anonymizationMap, boolean isShanoirAnonymization,
-String patientName, String patientID, Map<String, String> seriesInstanceUIDs,
+String patientName, String patientID, Map<String, String> seriesInstanceUIDs, Map<String, String> frameOfReferenceUIDs,
Map<String, String> studyInstanceUIDs, Map<String, String> studyIds) throws Exception {
DicomInputStream din = null;
DicomOutputStream dos = null;
@@ -213,18 +215,17 @@ public void performAnonymization(final File dicomFile, Map<String, String> anony
anonymizeTag(tagInt, action, datasetAttributes);
// even: public tags
} else if (anonymizationMap.containsKey(tagString)) {
-if (tagInt == Tag.SOPInstanceUID) {
-anonymizeSOPInstanceUID(tagInt, datasetAttributes, mediaStorageSOPInstanceUIDGenerated);
-} else if (tagInt == Tag.SeriesInstanceUID) {
-anonymizeSeriesInstanceUID(tagInt, datasetAttributes, seriesInstanceUIDs);
-} else if (tagInt == Tag.StudyInstanceUID) {
-anonymizeStudyInstanceUID(tagInt, datasetAttributes, studyInstanceUIDs);
-} else if (tagInt == Tag.StudyID) {
-anonymizeStudyId(tagInt, datasetAttributes, studyIds);
-} else {
-final String action = anonymizationMap.get(tagString);
-anonymizeTag(tagInt, action, datasetAttributes);
-}
+switch (tagInt) {
+case Tag.SOPInstanceUID -> anonymizeSOPInstanceUID(tagInt, datasetAttributes, mediaStorageSOPInstanceUIDGenerated);
+case Tag.SeriesInstanceUID -> anonymizeUID(tagInt, datasetAttributes, seriesInstanceUIDs);
+case Tag.FrameOfReferenceUID -> anonymizeUID(tagInt, datasetAttributes, frameOfReferenceUIDs);
+case Tag.StudyInstanceUID -> anonymizeUID(tagInt, datasetAttributes, studyInstanceUIDs);
+case Tag.StudyID -> anonymizeStudyId(tagInt, datasetAttributes, studyIds);
+default -> {
+final String action = anonymizationMap.get(tagString);
+anonymizeTag(tagInt, action, datasetAttributes);
+}
+}
} else {
if (0x50000000 <= tagInt && tagInt <= 0x50FFFFFF) {
final String action = anonymizationMap.get(CURVE_DATA_TAGS);
@@ -350,46 +351,6 @@ private void anonymizeSOPInstanceUID(int tagInt, Attributes attributes, String m
anonymizeTagAccordingToVR(attributes, tagInt, mediaStorageSOPInstanceUID);
}

-private void anonymizeSeriesInstanceUID(int tagInt, Attributes attributes, Map<String, String> seriesInstanceUIDs) {
-String value;
-if (seriesInstanceUIDs != null && seriesInstanceUIDs.size() != 0
-&& seriesInstanceUIDs.get(attributes.getString(tagInt)) != null) {
-value = seriesInstanceUIDs.get(attributes.getString(tagInt));
-} else {
-UIDGeneration generator = new UIDGeneration();
-String newUID = null;
-try {
-newUID = generator.getNewUID();
-} catch (Exception e) {
-LOG.error(e.getMessage());
-}
-value = newUID;
-seriesInstanceUIDs.put(attributes.getString(tagInt), value);
-}
-anonymizeTagAccordingToVR(attributes, tagInt, value);
-}
-
-private void anonymizeStudyInstanceUID(int tagInt, Attributes attributes, Map<String, String> studyInstanceUIDs) {
-String value;
-if (studyInstanceUIDs != null && studyInstanceUIDs.size() != 0
-&& studyInstanceUIDs.get(attributes.getString(tagInt)) != null) {
-value = studyInstanceUIDs.get(attributes.getString(tagInt));
-LOG.debug("Existing StudyInstanceUID reused: {}", value);
-} else {
-UIDGeneration generator = new UIDGeneration();
-String newUID = null;
-try {
-newUID = generator.getNewUID();
-} catch (Exception e) {
-LOG.error(e.getMessage());
-}
-value = newUID;
-LOG.info("New StudyInstanceUID generated for DICOM study/exam: {}", newUID);
-studyInstanceUIDs.put(attributes.getString(tagInt), value);
-}
-anonymizeTagAccordingToVR(attributes, tagInt, value);
-}
-
private void anonymizeStudyId(int tagInt, Attributes attributes, Map<String, String> studyIds) {
String value;
if (studyIds != null && studyIds.size() != 0 && studyIds.get(attributes.getString(tagInt)) != null) {
@@ -523,4 +484,30 @@ else if (vr.equals(VR.AE) || vr.equals(VR.AS) || vr.equals(VR.CS) || vr.equals(V
// VR.OD = Other Double String
}

+private void anonymizeUID(int tagInt, Attributes attributes, Map<String, String> UIDs) {
+String value;
+if (UIDs != null && UIDs.size() != 0
+&& UIDs.get(attributes.getString(tagInt)) != null) {
+value = UIDs.get(attributes.getString(tagInt));
+// log only for the StudyInstanceUID
+if (Tag.StudyInstanceUID == tagInt) {
+LOG.debug("Existing StudyInstanceUID reused: {}", value);
+}
+} else {
+UIDGeneration generator = new UIDGeneration();
+String newUID = null;
+try {
+newUID = generator.getNewUID();
+} catch (Exception e) {
+LOG.error(e.getMessage());
+}
+value = newUID;
+if (Tag.StudyInstanceUID == tagInt) {
+LOG.info("New StudyInstanceUID generated for DICOM study/exam: {}", newUID);
+}
+UIDs.put(attributes.getString(tagInt), value);
+}
+anonymizeTagAccordingToVR(attributes, tagInt, value);
+}
+
}
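The single anonymizeUID above replaces the three near-identical methods deleted earlier and extends the same treatment to FrameOfReferenceUID. The per-run map is the load-bearing part: every file carrying the same original UID must receive the same replacement, otherwise slices of a series, or frames sharing a frame of reference, would lose their mutual references after anonymization. A standalone sketch of that contract; the project's UIDGeneration is replaced here by the standard UUID-derived "2.25." DICOM UID form, an assumption made only for this demo:

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Standalone sketch of the caching contract behind anonymizeUID: the
// same original UID always maps to the same replacement within a run,
// so cross-file references stay intact.
public class UidMappingSketch {

    static String replacementFor(String originalUid, Map<String, String> cache) {
        return cache.computeIfAbsent(originalUid, k -> newUid());
    }

    // Stand-in for UIDGeneration: the UUID-derived "2.25." DICOM UID form.
    static String newUid() {
        String hex = UUID.randomUUID().toString().replace("-", "");
        return "2.25." + new BigInteger(hex, 16);
    }

    public static void main(String[] args) {
        Map<String, String> frameOfReferenceUIDs = new HashMap<>();
        String first = replacementFor("1.2.840.113619.2.55.3", frameOfReferenceUIDs);
        String second = replacementFor("1.2.840.113619.2.55.3", frameOfReferenceUIDs);
        System.out.println(first.equals(second)); // true: reference preserved
    }
}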
@@ -312,21 +312,15 @@ public void createDatasetAcquisition(final String studyStr) {


/**
-* Receives a shanoirEvent as a json object, concerning a subject deletion
-* @param eventAsString the task as a json string.
-*/
-@RabbitListener(bindings = @QueueBinding(
-key = ShanoirEventType.DELETE_SUBJECT_EVENT,
-value = @Queue(value = RabbitMQConfiguration.DELETE_SUBJECT_QUEUE, durable = "true"),
-exchange = @Exchange(value = RabbitMQConfiguration.EVENTS_EXCHANGE, ignoreDeclarationExceptions = "true",
-autoDelete = "false", durable = "true", type=ExchangeTypes.TOPIC)), containerFactory = "singleConsumerFactory"
-)
+* Receives the id of a subject to delete, as a plain string.
+* @param subjectIdAsString a string of the subject's id
+*/
+@RabbitListener(queues = RabbitMQConfiguration.DELETE_SUBJECT_QUEUE, containerFactory = "singleConsumerFactory")
@Transactional
-public void deleteSubject(String eventAsString) throws AmqpRejectAndDontRequeueException {
+public void deleteSubject(String subjectIdAsString) throws AmqpRejectAndDontRequeueException {
SecurityContextUtil.initAuthenticationContext("ROLE_ADMIN");
try {
-ShanoirEvent event = objectMapper.readValue(eventAsString, ShanoirEvent.class);
-Long subjectId = Long.valueOf(event.getObjectId());
+Long subjectId = Long.valueOf(subjectIdAsString);
Set<Long> studyIds = new HashSet<>();

// Inverse order to remove copied examination before its source (if copied)
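The listener no longer deserializes a full ShanoirEvent: it binds straight to the queue and takes the subject id as the message body. A hypothetical producer-side counterpart; the class below is invented for illustration, and only the queue constant comes from the diff:

import org.springframework.amqp.rabbit.core.RabbitTemplate;

// Hypothetical publisher matching the simplified listener above. Pass
// RabbitMQConfiguration.DELETE_SUBJECT_QUEUE (the constant used by the
// listener) as deleteSubjectQueue.
public class SubjectDeletionPublisher {

    private final RabbitTemplate rabbitTemplate;
    private final String deleteSubjectQueue;

    public SubjectDeletionPublisher(RabbitTemplate rabbitTemplate, String deleteSubjectQueue) {
        this.rabbitTemplate = rabbitTemplate;
        this.deleteSubjectQueue = deleteSubjectQueue;
    }

    public void publishDeletion(Long subjectId) {
        // The default exchange routes by queue name, so the old topic
        // binding is no longer needed for this message.
        rabbitTemplate.convertAndSend(deleteSubjectQueue, subjectId.toString());
    }
}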
@@ -477,7 +471,7 @@ public void copyDatasetsToStudy(final String data) {
Long dsCount = datasetRepository.countDatasetsBySourceIdAndStudyId(datasetParentId, studyId);
Dataset datasetParent = datasetService.findById(datasetParentId);

-if (datasetParent.getSourceId() != null) {
+if (datasetParent.getSource() != null) {
LOG.info("[CopyDatasets] Selected dataset is a copy, please pick the original dataset.");
countCopy++;
} else if (dsCount != 0) {
@@ -502,7 +496,8 @@ public void copyDatasetsToStudy(final String data) {
event.setStatus(ShanoirEvent.SUCCESS);
event.setProgress(1.0f);
eventService.publishEvent(event);
-solrService.indexDatasets(newDatasets);
+if (newDatasets.size() > 0)
+solrService.indexDatasets(newDatasets);

} catch (Exception e) {
if (event != null) {
@@ -60,6 +60,7 @@
import org.slf4j.LoggerFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.data.domain.Page;
@@ -139,8 +140,9 @@ public class DatasetApiController implements DatasetApi {
@Autowired
private SolrService solrService;

-@Autowired
-DatasetDownloaderServiceImpl datasetDownloaderService;
+@Qualifier("datasetDownloaderServiceImpl")
+@Autowired
+protected DatasetDownloaderServiceImpl datasetDownloaderService;

@Autowired
private ObjectMapper objectMapper;
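Widening the field to protected and pinning it with @Qualifier suggests the downloader now has, or is about to get, more than one bean of that type in the context; with two candidates, injection by type alone fails. A minimal sketch of that situation, with the subclass invented for illustration:

import org.springframework.stereotype.Service;

// The base bean keeps Spring's default name, "datasetDownloaderServiceImpl".
@Service
class DatasetDownloaderServiceImpl { /* ... */ }

// Hypothetical subclass bean: once it exists, autowiring the base type
// without a qualifier throws NoUniqueBeanDefinitionException, which is
// exactly what the @Qualifier above avoids.
@Service
class SpecializedDatasetDownloaderService extends DatasetDownloaderServiceImpl { /* ... */ }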
@@ -54,6 +54,11 @@ public class DatasetDTO {

private List<StudyTagDTO> tags;

+private Long source;
+
+private List<Long> copies;
+
+
/**
* @return the creationDate
*/
@@ -198,4 +203,20 @@ public List<StudyTagDTO> getTags() {
public void setTags(List<StudyTagDTO> tags) {
this.tags = tags;
}

+public Long getSource() {
+return source;
+}
+
+public void setSource(Long source) {
+this.source = source;
+}
+
+public List<Long> getCopies() {
+return copies;
+}
+
+public void setCopies(List<Long> copies) {
+this.copies = copies;
+}
}
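Exposing source and copies as bare ids rather than nested Dataset objects keeps the serialized DTO flat and, presumably, breaks the cycle that would otherwise exist between a dataset and its copies, since each copy points back to its source.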
@@ -17,8 +17,11 @@
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
+import java.util.stream.Collectors;

+import org.hibernate.Hibernate;
import org.shanoir.ng.dataset.dto.DatasetDTO;
+import org.shanoir.ng.dataset.dto.DatasetWithDependenciesDTO;
import org.shanoir.ng.dataset.modality.EegDataset;
import org.shanoir.ng.dataset.modality.EegDatasetMapper;
import org.shanoir.ng.dataset.modality.MrDataset;
@@ -79,4 +82,13 @@ public IdName datasetToIdNameDTO(final Dataset dataset) {
return defaultMapper.datasetToIdNameDTO(dataset);
}

+@Override
+public DatasetWithDependenciesDTO datasetToDatasetWithParentsAndProcessingsDTO(Dataset dataset) {
+final DatasetWithDependenciesDTO datasetDTO = defaultMapper.datasetToDatasetWithParentsAndProcessingsDTO(dataset);
+Hibernate.initialize(dataset.getCopies());
+datasetDTO.setCopies(dataset.getCopies().stream()
+.map(Dataset::getId)
+.collect(Collectors.toList()));
+return datasetDTO;
+}
}
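Hibernate.initialize forces the copies collection, presumably mapped lazy, to load while the persistence session is still open, so the subsequent mapping to ids cannot fail with a LazyInitializationException.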
@@ -14,10 +14,7 @@

package org.shanoir.ng.dataset.dto.mapper;

-import org.mapstruct.DecoratedWith;
-import org.mapstruct.IterableMapping;
-import org.mapstruct.Mapper;
-import org.mapstruct.Named;
+import org.mapstruct.*;
import org.shanoir.ng.dataset.dto.DatasetWithDependenciesDTO;
import org.shanoir.ng.dataset.dto.DatasetDTO;
import org.shanoir.ng.dataset.model.Dataset;
@@ -28,6 +25,7 @@
import org.springframework.data.domain.Page;

import java.util.List;
+import java.util.stream.Collectors;

/**
* Mapper for datasets.
@@ -66,6 +64,8 @@ public interface DatasetMapper {
* @return dataset DTO.
*/
@Named(value = "withProcessings")
@Mapping(target = "copies", expression = "java(mapCopiesFromDataset(dataset.getCopies()))")
@Mapping(target = "source", expression = "java(mapSourceFromDataset(dataset.getSource()))")
DatasetWithDependenciesDTO datasetToDatasetWithParentsAndProcessingsDTO(Dataset dataset);

/**
@@ -98,4 +98,25 @@
*/
IdName datasetToIdNameDTO(Dataset dataset);

+default List<Long> mapCopiesFromDataset(List<Dataset> copies) {
+if (copies == null) {
+return null;
+}
+return copies.stream()
+.map(Dataset::getId)
+.collect(Collectors.toList());
+}
+
+default Long mapSourceFromDataset(Dataset source) {
+return source != null ? source.getId() : null;
+}
+
+
+default List<Dataset> mapCopiesFromLong(List<Long> copies) {
+return null;
+}
+
+default Dataset mapSourceFromLong(Long source) {
+return null;
+}
}
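The asymmetry looks deliberate: entity-to-DTO flattens source and copies down to ids, while the reverse default methods return null so that mapping a DTO back never fabricates half-initialized Dataset entities. Resolving ids back to entities is presumably left to the service layer.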