diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml
index 9a6d7f5c13..ef3b9e1526 100644
--- a/.idea/codeStyles/Project.xml
+++ b/.idea/codeStyles/Project.xml
@@ -486,10 +486,8 @@
-
-
diff --git a/.run/metis-authentication(.properties).run.xml b/.run/metis-authentication(.properties).run.xml
index c086d4b414..2b70f0317c 100644
--- a/.run/metis-authentication(.properties).run.xml
+++ b/.run/metis-authentication(.properties).run.xml
@@ -2,11 +2,12 @@
-
+
-
+ value="file:///data/metis-configuration/metis-framework/metis-authentication/metis-authentication-rest/k8s/overlays/test/components/properties/application.properties"/>
+
diff --git a/.run/metis-core (.properties).run.xml b/.run/metis-core (.properties).run.xml
index bc96545a9e..98d47339b9 100644
--- a/.run/metis-core (.properties).run.xml
+++ b/.run/metis-core (.properties).run.xml
@@ -2,13 +2,12 @@
-
-
-
+
-
+ value="file:///data/metis-configuration/metis-framework/metis-core/metis-core-rest/k8s/overlays/test/components/properties/application.properties"/>
+
diff --git a/.run/metis-dereference(.properties).run.xml b/.run/metis-dereference(.properties).run.xml
index 4ec05005ed..6fa1623df6 100644
--- a/.run/metis-dereference(.properties).run.xml
+++ b/.run/metis-dereference(.properties).run.xml
@@ -2,11 +2,12 @@
-
+
-
+ value="file:///data/metis-configuration/metis-framework/metis-dereference/metis-dereference-rest/k8s/overlays/test/components/properties/application.properties"/>
+
diff --git a/.run/metis-repository (.properties).run.xml b/.run/metis-repository (.properties).run.xml
index 5a71d54331..537d519165 100644
--- a/.run/metis-repository (.properties).run.xml
+++ b/.run/metis-repository (.properties).run.xml
@@ -5,7 +5,9 @@
+ value="file:///data/metis-configuration/metis-framework/metis-repository/metis-repository-rest/k8s/overlays/test/components/properties/application.properties"/>
+
diff --git a/metis-authentication/metis-authentication-common/pom.xml b/metis-authentication/metis-authentication-common/pom.xml
index f0c7832a5f..af03a2ab3b 100644
--- a/metis-authentication/metis-authentication-common/pom.xml
+++ b/metis-authentication/metis-authentication-common/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>metis-authentication</artifactId>
     <groupId>eu.europeana.metis</groupId>
-    <version>10</version>
+    <version>11</version>
   </parent>
   <artifactId>metis-authentication-common</artifactId>
diff --git a/metis-authentication/metis-authentication-rest-client/pom.xml b/metis-authentication/metis-authentication-rest-client/pom.xml
index 31a7a52b80..e46cb131d9 100644
--- a/metis-authentication/metis-authentication-rest-client/pom.xml
+++ b/metis-authentication/metis-authentication-rest-client/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>metis-authentication</artifactId>
     <groupId>eu.europeana.metis</groupId>
-    <version>10</version>
+    <version>11</version>
   </parent>
   <artifactId>metis-authentication-rest-client</artifactId>
diff --git a/metis-authentication/metis-authentication-rest/pom.xml b/metis-authentication/metis-authentication-rest/pom.xml
index f8d1761485..19d983e9d1 100644
--- a/metis-authentication/metis-authentication-rest/pom.xml
+++ b/metis-authentication/metis-authentication-rest/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>metis-authentication</artifactId>
     <groupId>eu.europeana.metis</groupId>
-    <version>10</version>
+    <version>11</version>
   </parent>
   <artifactId>metis-authentication-rest</artifactId>
@@ -58,6 +58,11 @@
     <dependency>
       <groupId>eu.europeana.metis</groupId>
       <artifactId>metis-common-utils</artifactId>
     </dependency>
+    <dependency>
+      <groupId>eu.europeana.metis</groupId>
+      <artifactId>metis-common-spring-properties</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>eu.europeana.metis</groupId>
       <artifactId>metis-authentication-common</artifactId>
diff --git a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/ApplicationConfiguration.java b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/ApplicationConfiguration.java
index a907205490..540040bc1e 100644
--- a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/ApplicationConfiguration.java
+++ b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/ApplicationConfiguration.java
@@ -3,6 +3,7 @@
import static eu.europeana.metis.utils.SonarqubeNullcheckAvoidanceUtils.performAction;
import eu.europeana.metis.authentication.dao.PsqlMetisUserDao;
+import eu.europeana.metis.authentication.rest.config.properties.MetisAuthenticationConfigurationProperties;
import eu.europeana.metis.authentication.service.AuthenticationService;
import eu.europeana.metis.authentication.user.MetisUser;
import eu.europeana.metis.authentication.user.MetisUserAccessToken;
@@ -16,24 +17,35 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
+import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.List;
import javax.annotation.PreDestroy;
+import metis.common.config.properties.TruststoreConfigurationProperties;
+import metis.common.config.properties.postgres.HibernateConfigurationProperties;
+import metis.common.config.properties.zoho.ZohoConfigurationProperties;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.service.ServiceRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
import org.springframework.core.io.Resource;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter;
+import org.springframework.lang.NonNull;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.FileCopyUtils;
@@ -47,67 +59,45 @@
* @since 2017-10-27
*/
@Configuration
-@Import({ElasticAPMConfiguration.class})
+@EnableConfigurationProperties({
+ ElasticAPMConfiguration.class, TruststoreConfigurationProperties.class,
+ HibernateConfigurationProperties.class, ZohoConfigurationProperties.class,
+ MetisAuthenticationConfigurationProperties.class})
@ComponentScan(basePackages = {"eu.europeana.metis.authentication.rest.controller"})
@EnableScheduling
-public class ApplicationConfiguration implements WebMvcConfigurer {
+public class ApplicationConfiguration implements WebMvcConfigurer, ApplicationContextAware {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@Value("classpath:create_tables.sql")
private Resource createTablesSqlResource;
- //Custom trustore
- @Value("${truststore.path}")
- private String truststorePath;
- @Value("${truststore.password}")
- private String truststorePassword;
- @Value("${metis.access.token.expire.time.in.mins}")
- private int metisAccessTokenExpireTimeInMins;
- @Value("${allowed.cors.hosts}")
- private String[] allowedCorsHosts;
-
- //Zoho configuration
- @Value("${zoho.initial.grant.token}")
- private String zohoInitialGrantToken;
- @Value("${zoho.refresh.token}")
- private String zohoRefreshToken;
- @Value("${zoho.current.user.email}")
- private String zohoCurrentUserEmail;
- @Value("${zoho.client.id}")
- private String zohoClientId;
- @Value("${zoho.client.secret}")
- private String zohoClientSecret;
- @Value("${zoho.redirect.uri}")
- private String zohoRedirectUri;
-
- //Hibernate configuration
- @Value("${hibernate.connection.driver_class}")
- private String hibernateConnectionDriverClass;
- @Value("${hibernate.connection.url}")
- private String hibernateConnectionUrl;
- @Value("${hibernate.dialect}")
- private String hibernateDialect;
- @Value("${hibernate.connection.username}")
- private String hibernateConnectionUsername;
- @Value("${hibernate.connection.password}")
- private String hibernateConnectionPassword;
- @Value("${hibernate.c3p0.min_size}")
- private String hibernateC3P0MinSize;
- @Value("${hibernate.c3p0.max_size}")
- private String hibernateC3P0MaxSize;
- @Value("${hibernate.c3p0.timeout}")
- private String hibernateC3P0Timeout;
- @Value("${hibernate.c3p0.max_statements}")
- private String hibernateC3p0MaxStatements;
-
private SessionFactory sessionFactory;
private AuthenticationService authenticationService;
private MetisZohoOAuthPSQLHandler metisZohoOAuthPSQLHandler;
+ private ApplicationContext applicationContext;
+
+ /**
+ * Constructor.
+ *
+ * @param truststoreConfigurationProperties the truststore configuration properties
+ * @throws CustomTruststoreAppender.TrustStoreConfigurationException if the configuration of the truststore failed
+ */
+ @Autowired
+ public ApplicationConfiguration(TruststoreConfigurationProperties truststoreConfigurationProperties)
+ throws TrustStoreConfigurationException {
+ ApplicationConfiguration.initializeApplication(truststoreConfigurationProperties);
+ }
+ /**
+ * Get the session factory.
+ *
+ * @param hibernateConfigurationProperties the hibernate configuration properties
+ * @return the session factory
+ * @throws IOException if an I/O error occurs during sql script initialization
+ */
@Bean
- public SessionFactory getSessionFactory() throws TrustStoreConfigurationException, IOException {
- if (StringUtils.isNotEmpty(truststorePath) && StringUtils.isNotEmpty(truststorePassword)) {
- CustomTruststoreAppender.appendCustomTrustoreToDefault(truststorePath, truststorePassword);
- }
+ public SessionFactory getSessionFactory(HibernateConfigurationProperties hibernateConfigurationProperties) throws IOException {
org.hibernate.cfg.Configuration configuration = new org.hibernate.cfg.Configuration();
configuration.addAnnotatedClass(MetisUser.class);
@@ -115,15 +105,17 @@ public SessionFactory getSessionFactory() throws TrustStoreConfigurationExceptio
configuration.addAnnotatedClass(MetisZohoOAuthToken.class);
//Apply code configuration to allow spring boot to handle the properties injection
- configuration.setProperty("hibernate.connection.driver_class", hibernateConnectionDriverClass);
- configuration.setProperty("hibernate.connection.url", hibernateConnectionUrl);
- configuration.setProperty("hibernate.dialect", hibernateDialect);
- configuration.setProperty("hibernate.connection.username", hibernateConnectionUsername);
- configuration.setProperty("hibernate.connection.password", hibernateConnectionPassword);
- configuration.setProperty("hibernate.c3p0.min_size", hibernateC3P0MinSize);
- configuration.setProperty("hibernate.c3p0.max_size", hibernateC3P0MaxSize);
- configuration.setProperty("hibernate.c3p0.timeout", hibernateC3P0Timeout);
- configuration.setProperty("hibernate.c3p0.max_statements", hibernateC3p0MaxStatements);
+ configuration.setProperty("hibernate.connection.driver_class",
+ hibernateConfigurationProperties.getConnection().getDriverClass());
+ configuration.setProperty("hibernate.connection.url", hibernateConfigurationProperties.getConnection().getUrl());
+ configuration.setProperty("hibernate.connection.username", hibernateConfigurationProperties.getConnection().getUsername());
+ configuration.setProperty("hibernate.connection.password", hibernateConfigurationProperties.getConnection().getPassword());
+ configuration.setProperty("hibernate.dialect", hibernateConfigurationProperties.getDialect());
+ configuration.setProperty("hibernate.c3p0.min_size", hibernateConfigurationProperties.getC3p0().getMinSize());
+ configuration.setProperty("hibernate.c3p0.max_size", hibernateConfigurationProperties.getC3p0().getMaxSize());
+ configuration.setProperty("hibernate.c3p0.timeout", hibernateConfigurationProperties.getC3p0().getTimeout());
+ configuration.setProperty("hibernate.c3p0.max_statements", hibernateConfigurationProperties.getC3p0().getMaxStatements());
+ configuration.setProperty("hibernate.hbm2ddl.auto", hibernateConfigurationProperties.getHbm2ddl().getAuto());
ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
.applySettings(configuration.getProperties()).build();
@@ -146,10 +138,42 @@ public SessionFactory getSessionFactory() throws TrustStoreConfigurationExceptio
return sessionFactory;
}
+ /**
+ * Set the application context.
+ *
+ * @param applicationContext the application context
+ * @throws BeansException if a beans exception occurs
+ */
+ @Override
+ public void setApplicationContext(@NonNull ApplicationContext applicationContext) throws BeansException {
+ this.applicationContext = applicationContext;
+ }
+
+ /**
+ * This method performs the initializing tasks for the application.
+ *
+ * @param truststoreConfigurationProperties The properties.
+ * @throws CustomTruststoreAppender.TrustStoreConfigurationException In case a problem occurred with the truststore.
+ */
+ static void initializeApplication(TruststoreConfigurationProperties truststoreConfigurationProperties)
+ throws CustomTruststoreAppender.TrustStoreConfigurationException {
+
+ // Load the trust store file.
+ if (StringUtils.isNotEmpty(truststoreConfigurationProperties.getPath()) && StringUtils
+ .isNotEmpty(truststoreConfigurationProperties.getPassword())) {
+ CustomTruststoreAppender
+ .appendCustomTruststoreToDefault(truststoreConfigurationProperties.getPath(),
+ truststoreConfigurationProperties.getPassword());
+ LOGGER.info("Custom truststore appended to default truststore");
+ }
+ }
+
@Override
public void addCorsMappings(CorsRegistry registry) {
+ MetisAuthenticationConfigurationProperties metisAuthenticationConfigurationProperties =
+ applicationContext.getBean(MetisAuthenticationConfigurationProperties.class);
registry.addMapping("/**").allowedMethods("GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS")
- .allowedOrigins(allowedCorsHosts);
+ .allowedOrigins(metisAuthenticationConfigurationProperties.getAllowedCorsHosts());
}
/**
@@ -170,17 +194,21 @@ public AuthenticationService getAuthenticationService(PsqlMetisUserDao psqlMetis
* Get the zoho access client.
*
* @param sessionFactory the session factory
+ * @param zohoConfigurationProperties the zoho configuration properties
* @return the zoho access client
* @throws ZohoException if a zoho configuration error occurred
*/
@Bean
- public ZohoAccessClient getZohoAccessClient(SessionFactory sessionFactory) throws ZohoException {
- metisZohoOAuthPSQLHandler = new MetisZohoOAuthPSQLHandler(sessionFactory, zohoCurrentUserEmail, zohoRefreshToken,
- zohoClientId, zohoClientSecret);
+ public ZohoAccessClient getZohoAccessClient(SessionFactory sessionFactory,
+ ZohoConfigurationProperties zohoConfigurationProperties) throws ZohoException {
+ metisZohoOAuthPSQLHandler = new MetisZohoOAuthPSQLHandler(sessionFactory, zohoConfigurationProperties.getCurrentUserEmail(),
+ zohoConfigurationProperties.getRefreshToken(),
+ zohoConfigurationProperties.getClientId(), zohoConfigurationProperties.getClientSecret());
final ZohoAccessClient zohoAccessClient = new ZohoAccessClient(metisZohoOAuthPSQLHandler,
- zohoCurrentUserEmail, zohoClientId, zohoClientSecret, zohoInitialGrantToken,
- zohoRedirectUri);
+ zohoConfigurationProperties.getCurrentUserEmail(), zohoConfigurationProperties.getClientId(),
+ zohoConfigurationProperties.getClientSecret(), zohoConfigurationProperties.getInitialGrantToken(),
+ zohoConfigurationProperties.getRedirectUri());
//Make a call to zoho so that the grant token will generate the first pair of access/refresh tokens
zohoAccessClient.getZohoRecordContactByEmail("");
return zohoAccessClient;
@@ -190,12 +218,15 @@ public ZohoAccessClient getZohoAccessClient(SessionFactory sessionFactory) throw
* Get the DAO for metis users.
*
* @param sessionFactory the session factory required to initialize the DAO
+ * @param metisAuthenticationConfigurationProperties the metis authentication configuration properties
* @return the DAO instance for accessing user information
*/
@Bean
- public PsqlMetisUserDao getPsqlMetisUserDao(SessionFactory sessionFactory) {
+ public PsqlMetisUserDao getPsqlMetisUserDao(SessionFactory sessionFactory,
+ MetisAuthenticationConfigurationProperties metisAuthenticationConfigurationProperties) {
PsqlMetisUserDao psqlMetisUserDao = new PsqlMetisUserDao(sessionFactory);
- psqlMetisUserDao.setAccessTokenExpireTimeInMins(metisAccessTokenExpireTimeInMins);
+ psqlMetisUserDao.setAccessTokenExpireTimeInMins(
+ metisAuthenticationConfigurationProperties.getAccessTokenExpireTimeInMinutes());
return psqlMetisUserDao;
}
diff --git a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/properties/MetisAuthenticationConfigurationProperties.java b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/properties/MetisAuthenticationConfigurationProperties.java
new file mode 100644
index 0000000000..2fe1de1086
--- /dev/null
+++ b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/config/properties/MetisAuthenticationConfigurationProperties.java
@@ -0,0 +1,29 @@
+package eu.europeana.metis.authentication.rest.config.properties;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * Class using {@link ConfigurationProperties} loading.
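+ * <p>Illustrative binding (assumption, not part of this change): a property such as
+ * {@code metis-authentication.accessTokenExpireTimeInMinutes=10} binds to the
+ * {@code accessTokenExpireTimeInMinutes} field through Spring Boot relaxed binding.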
+ */
+@ConfigurationProperties(prefix = "metis-authentication")
+public class MetisAuthenticationConfigurationProperties {
+
+ private int accessTokenExpireTimeInMinutes;
+ private String allowedCorsHosts;
+
+ public int getAccessTokenExpireTimeInMinutes() {
+ return accessTokenExpireTimeInMinutes;
+ }
+
+ public void setAccessTokenExpireTimeInMinutes(int accessTokenExpireTimeInMinutes) {
+ this.accessTokenExpireTimeInMinutes = accessTokenExpireTimeInMinutes;
+ }
+
+ public String getAllowedCorsHosts() {
+ return allowedCorsHosts;
+ }
+
+ public void setAllowedCorsHosts(String allowedCorsHosts) {
+ this.allowedCorsHosts = allowedCorsHosts;
+ }
+}
diff --git a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java
index 7fb6f57100..b7793cb890 100644
--- a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java
+++ b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java
@@ -60,8 +60,8 @@ public AuthenticationController(
/**
* Register a user using an authorization header.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Basic Base64Encoded(email:password)
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Basic
+   * Base64Encoded(email:password)
* @throws GenericMetisException which can be one of:
   * <ul>
   * <li>{@link BadContentException} if the authorization header is un-parsable or there is problem
@@ -84,8 +84,8 @@ public void registerUser(@RequestHeader("Authorization") String authorization)
/**
* Login functionality, which checks if the user with email exists and generates an access token.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Basic Base64Encoded(email:password)
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Basic
+   * Base64Encoded(email:password)
* @return {@link MetisUserView}
* @throws GenericMetisException which can be one of:
*
@@ -112,8 +112,8 @@ public MetisUserView loginUser(@RequestHeader("Authorization") String authorizat
/**
* Update a users password by authentication with an access token.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @param oldAndNewPasswordParameters contains the old and new password
* @throws GenericMetisException which can be one of:
*
@@ -133,7 +133,7 @@ public void updateUserPassword(@RequestHeader("Authorization") String authorizat
throw new BadContentException("oldPassword or newPassword not provided");
}
if (oldAndNewPasswordParameters.getOldPassword()
- .equals(oldAndNewPasswordParameters.getNewPassword())) {
+ .equals(oldAndNewPasswordParameters.getNewPassword())) {
throw new BadContentException("newPassword must be different than oldPassword");
}
String accessToken = authenticationService
@@ -151,8 +151,8 @@ public void updateUserPassword(@RequestHeader("Authorization") String authorizat
/**
* Delete a user from the system.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @param emailParameter the class that contains the email parameter to act upon
* @throws GenericMetisException which can be one of:
*
@@ -185,8 +185,8 @@ public void deleteUser(@RequestHeader("Authorization") String authorization,
/**
* Update a user by re-retrieving the user from the remote CRM.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @param emailParameter the class that contains the email parameter to act upon
* @return updated {@link MetisUserView}
* @throws GenericMetisException which can be one of:
@@ -222,8 +222,8 @@ public MetisUserView updateUser(@RequestHeader("Authorization") String authoriza
/**
* Change the {@link AccountRole} of a user.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @param emailParameter the class that contains the email parameter to act upon
* @throws GenericMetisException which can be one of:
*
@@ -257,8 +257,8 @@ public void updateUserToMakeAdmin(@RequestHeader("Authorization") String authori
* Get a user using a user identifier.
   * <p>POST method is used to pass the user identifier through the body
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @param userIdParameter the class that contains the userId parameter to act upon
* @return the metis user
* @throws GenericMetisException which can be one of:
@@ -284,8 +284,8 @@ public MetisUserView getUserByUserId(@RequestHeader("Authorization") String auth
/**
* Retrieve a user by using an access token.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @return the corresponding {@link MetisUserView}
* @throws GenericMetisException which can be one of:
*
@@ -309,8 +309,8 @@ public MetisUserView getUserByAccessToken(@RequestHeader("Authorization") String
/**
* Retrieve a list of all the users in the system.
*
-   * @param authorization the String provided by an HTTP Authorization header <p> The expected input
-   * should follow the rule Bearer accessTokenHere
+   * @param authorization the String provided by an HTTP Authorization header <p> The expected input should follow the rule Bearer
+   * accessTokenHere
* @return the list with all the {@link MetisUserView}s
* @throws GenericMetisException which can be one of:
*
   * <p>It will find stale executions and will re-submit them in the distributed queue.
*/
- @Scheduled(fixedDelayString = "${periodic.failsafe.check.in.millisecs}")
+ // TODO: 24/08/2023 Is there a better way to load the configuration here?
+ @Scheduled(fixedDelayString = "${metis-core.periodicFailsafeCheckInMilliseconds}")
public void runFailsafeExecutor() {
- LOGGER.info("Failsafe task started (runs every {} milliseconds).",
- propertiesHolder.getPeriodicFailsafeCheckInMillisecs());
this.workflowExecutionMonitor.performFailsafe();
LOGGER.info("Failsafe task finished.");
}
@@ -293,10 +325,11 @@ public void runFailsafeExecutor() {
   * <p>Checks if scheduled workflows are valid for starting and sends them to the distributed
* queue.
*/
- @Scheduled(fixedDelayString = "${periodic.scheduler.check.in.millisecs}", initialDelayString = "${periodic.scheduler.check.in.millisecs}")
+ // TODO: 24/08/2023 Is there a better way to load the configuration here?
+ @Scheduled(
+ fixedDelayString = "${metis-core.periodicSchedulerCheckInMilliseconds}",
+ initialDelayString = "${metis-core.periodicSchedulerCheckInMilliseconds}")
public void runSchedulingExecutor() {
- LOGGER.info("Scheduler task started (runs every {} milliseconds).",
- propertiesHolder.getPeriodicSchedulerCheckInMillisecs());
this.schedulerExecutor.performScheduling();
LOGGER.info("Scheduler task finished.");
}
diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java
index 6d8dc71770..f38ad88797 100644
--- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java
+++ b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java
@@ -24,11 +24,13 @@
import javax.annotation.PreDestroy;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
+import metis.common.config.properties.TruststoreConfigurationProperties;
+import metis.common.config.properties.rabbitmq.RabbitmqConfigurationProperties;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@@ -37,51 +39,41 @@
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
+@EnableConfigurationProperties({RabbitmqConfigurationProperties.class, TruststoreConfigurationProperties.class})
@ComponentScan(basePackages = {"eu.europeana.metis.core.rest.controller"})
@EnableScheduling
public class QueueConfig implements WebMvcConfigurer {
private static final Logger LOGGER = LoggerFactory.getLogger(QueueConfig.class);
-
- private final ConfigurationPropertiesHolder propertiesHolder;
private QueueConsumer queueConsumer;
private Connection connection;
private Channel publisherChannel;
private Channel consumerChannel;
- /**
- * Constructor with the required properties class.
- *
- * @param propertiesHolder the properties holder
- */
- @Autowired
- public QueueConfig(ConfigurationPropertiesHolder propertiesHolder) {
- this.propertiesHolder = propertiesHolder;
- }
-
@Bean
- Connection getConnection()
+ Connection getConnection(RabbitmqConfigurationProperties rabbitmqConfigurationProperties,
+ TruststoreConfigurationProperties truststoreConfigurationProperties)
throws KeyManagementException, NoSuchAlgorithmException, IOException, TimeoutException, KeyStoreException, CertificateException {
ConnectionFactory connectionFactory = new ConnectionFactory();
- connectionFactory.setHost(propertiesHolder.getRabbitmqHost());
- connectionFactory.setPort(propertiesHolder.getRabbitmqPort());
+ connectionFactory.setHost(rabbitmqConfigurationProperties.getHost());
+ connectionFactory.setPort(rabbitmqConfigurationProperties.getPort());
connectionFactory.setVirtualHost(
- StringUtils.isNotBlank(propertiesHolder.getRabbitmqVirtualHost()) ? propertiesHolder
- .getRabbitmqVirtualHost() : "/");
- connectionFactory.setUsername(propertiesHolder.getRabbitmqUsername());
- connectionFactory.setPassword(propertiesHolder.getRabbitmqPassword());
+ StringUtils.isNotBlank(rabbitmqConfigurationProperties.getVirtualHost()) ? rabbitmqConfigurationProperties
+ .getVirtualHost() : "/");
+ connectionFactory.setUsername(rabbitmqConfigurationProperties.getUsername());
+ connectionFactory.setPassword(rabbitmqConfigurationProperties.getPassword());
connectionFactory.setAutomaticRecoveryEnabled(true);
- if (propertiesHolder.isRabbitmqEnableSSL()) {
- if (propertiesHolder.isRabbitmqEnableCustomTruststore()) {
+ if (rabbitmqConfigurationProperties.isEnableSsl()) {
+ if (rabbitmqConfigurationProperties.isEnableCustomTruststore()) {
// Load the ssl context with the provided truststore
final KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
// This file is determined in the config files, it does not pose a risk.
@SuppressWarnings("findsecbugs:PATH_TRAVERSAL_IN")
final Path trustStoreFile = Paths.get(
- propertiesHolder.getTruststorePath());
+ truststoreConfigurationProperties.getPath());
try (final InputStream inputStream = Files.newInputStream(trustStoreFile)) {
- keyStore.load(inputStream, propertiesHolder.getTruststorePassword().toCharArray());
+ keyStore.load(inputStream, truststoreConfigurationProperties.getPassword().toCharArray());
}
TrustManagerFactory trustManagerFactory = TrustManagerFactory
.getInstance(TrustManagerFactory.getDefaultAlgorithm());
@@ -104,42 +96,47 @@ Connection getConnection()
}
@Bean(name = "rabbitmqPublisherChannel")
- Channel getRabbitmqPublisherChannel(Connection connection) throws IOException {
+ Channel getRabbitmqPublisherChannel(Connection connection, RabbitmqConfigurationProperties rabbitmqConfigurationProperties)
+ throws IOException {
publisherChannel = connection.createChannel();
- setupChannelProperties(publisherChannel);
+ setupChannelProperties(publisherChannel, rabbitmqConfigurationProperties);
return publisherChannel;
}
@Bean(name = "rabbitmqConsumerChannel")
- Channel getRabbitmqConsumerChannel(Connection connection) throws IOException {
+ Channel getRabbitmqConsumerChannel(Connection connection, RabbitmqConfigurationProperties rabbitmqConfigurationProperties)
+ throws IOException {
consumerChannel = connection.createChannel();
- setupChannelProperties(consumerChannel);
+ setupChannelProperties(consumerChannel, rabbitmqConfigurationProperties);
return consumerChannel;
}
- private void setupChannelProperties(Channel channel) throws IOException {
+ private void setupChannelProperties(Channel channel, RabbitmqConfigurationProperties rabbitmqConfigurationProperties)
+ throws IOException {
    Map<String, Object> args = new ConcurrentHashMap<>();
args.put("x-max-priority",
- propertiesHolder.getRabbitmqHighestPriority());//Higher number means higher priority
+ rabbitmqConfigurationProperties.getHighestPriority());//Higher number means higher priority
//Second boolean durable to false
- channel.queueDeclare(propertiesHolder.getRabbitmqQueueName(), false, false, false, args);
+ channel.queueDeclare(rabbitmqConfigurationProperties.getQueueName(), false, false, false, args);
}
@Bean
- public QueueConsumer getQueueConsumer(WorkflowExecutorManager workflowExecutionManager,
+ public QueueConsumer getQueueConsumer(
+ RabbitmqConfigurationProperties rabbitmqConfigurationProperties,
+ WorkflowExecutorManager workflowExecutionManager,
WorkflowExecutionMonitor workflowExecutionMonitor,
@Qualifier("rabbitmqConsumerChannel") Channel rabbitmqConsumerChannel) throws IOException {
queueConsumer = new QueueConsumer(rabbitmqConsumerChannel,
- propertiesHolder.getRabbitmqQueueName(), workflowExecutionManager, workflowExecutionManager,
+ rabbitmqConfigurationProperties.getQueueName(), workflowExecutionManager, workflowExecutionManager,
workflowExecutionMonitor);
return queueConsumer;
}
- @Scheduled(fixedDelayString = "${polling.timeout.for.cleaning.completion.service.in.millisecs}",
- initialDelayString = "${polling.timeout.for.cleaning.completion.service.in.millisecs}")
+ // TODO: 24/08/2023 Is there a better way to load the configuration here?
+ @Scheduled(
+ fixedDelayString = "${metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds}",
+ initialDelayString = "${metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds}")
public void runQueueConsumerCleanup() throws InterruptedException {
- LOGGER.debug("Queue consumer cleanup started (runs every {} milliseconds).",
- propertiesHolder.getPollingTimeoutForCleaningCompletionServiceInMillisecs());
this.queueConsumer.checkAndCleanCompletionService();
LOGGER.debug("Queue consumer cleanup finished.");
}
diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java
new file mode 100644
index 0000000000..c264ff69fc
--- /dev/null
+++ b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java
@@ -0,0 +1,187 @@
+package eu.europeana.metis.core.rest.config.properties;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * Class using {@link ConfigurationProperties} loading.
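+ * <p>Illustrative binding (assumption): {@code metis-core.maxConcurrentThreads=1} in
+ * application.properties binds to the {@code maxConcurrentThreads} field through Spring
+ * Boot relaxed binding.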
+ */
+@ConfigurationProperties(prefix = "metis-core")
+public class MetisCoreConfigurationProperties {
+
+ private int maxConcurrentThreads;
+ private int dpsMonitorCheckIntervalInSeconds;
+ private int dpsConnectTimeoutInMilliseconds;
+ private int dpsReadTimeoutInMilliseconds;
+ private int failsafeMarginOfInactivityInSeconds;
+ private int periodicFailsafeCheckInMilliseconds;
+ private int periodicSchedulerCheckInMilliseconds;
+ private int pollingTimeoutForCleaningCompletionServiceInMilliseconds;
+ private int periodOfNoProcessedRecordsChangeInMinutes;
+ private int threadLimitThrottlingLevelWeak;
+ private int threadLimitThrottlingLevelMedium;
+ private int threadLimitThrottlingLevelStrong;
+
+ private String baseUrl;
+ private int maxServedExecutionListLength;
+ private int maxDepublishRecordIdsPerDataset;
+
+ private int linkCheckingDefaultSamplingSize;
+ private int solrCommitPeriodInMinutes;
+
+ private String authenticationBaseUrl;
+ private String[] allowedCorsHosts;
+
+
+ public int getMaxConcurrentThreads() {
+ return maxConcurrentThreads;
+ }
+
+ public void setMaxConcurrentThreads(int maxConcurrentThreads) {
+ this.maxConcurrentThreads = maxConcurrentThreads;
+ }
+
+ public int getDpsMonitorCheckIntervalInSeconds() {
+ return dpsMonitorCheckIntervalInSeconds;
+ }
+
+ public void setDpsMonitorCheckIntervalInSeconds(int dpsMonitorCheckIntervalInSeconds) {
+ this.dpsMonitorCheckIntervalInSeconds = dpsMonitorCheckIntervalInSeconds;
+ }
+
+ public int getDpsConnectTimeoutInMilliseconds() {
+ return dpsConnectTimeoutInMilliseconds;
+ }
+
+ public void setDpsConnectTimeoutInMilliseconds(int dpsConnectTimeoutInMilliseconds) {
+ this.dpsConnectTimeoutInMilliseconds = dpsConnectTimeoutInMilliseconds;
+ }
+
+ public int getDpsReadTimeoutInMilliseconds() {
+ return dpsReadTimeoutInMilliseconds;
+ }
+
+ public void setDpsReadTimeoutInMilliseconds(int dpsReadTimeoutInMilliseconds) {
+ this.dpsReadTimeoutInMilliseconds = dpsReadTimeoutInMilliseconds;
+ }
+
+ public int getFailsafeMarginOfInactivityInSeconds() {
+ return failsafeMarginOfInactivityInSeconds;
+ }
+
+ public void setFailsafeMarginOfInactivityInSeconds(int failsafeMarginOfInactivityInSeconds) {
+ this.failsafeMarginOfInactivityInSeconds = failsafeMarginOfInactivityInSeconds;
+ }
+
+ public int getPeriodicFailsafeCheckInMilliseconds() {
+ return periodicFailsafeCheckInMilliseconds;
+ }
+
+ public void setPeriodicFailsafeCheckInMilliseconds(int periodicFailsafeCheckInMilliseconds) {
+ this.periodicFailsafeCheckInMilliseconds = periodicFailsafeCheckInMilliseconds;
+ }
+
+ public int getPeriodicSchedulerCheckInMilliseconds() {
+ return periodicSchedulerCheckInMilliseconds;
+ }
+
+ public void setPeriodicSchedulerCheckInMilliseconds(int periodicSchedulerCheckInMilliseconds) {
+ this.periodicSchedulerCheckInMilliseconds = periodicSchedulerCheckInMilliseconds;
+ }
+
+ public int getPollingTimeoutForCleaningCompletionServiceInMilliseconds() {
+ return pollingTimeoutForCleaningCompletionServiceInMilliseconds;
+ }
+
+ public void setPollingTimeoutForCleaningCompletionServiceInMilliseconds(
+ int pollingTimeoutForCleaningCompletionServiceInMilliseconds) {
+ this.pollingTimeoutForCleaningCompletionServiceInMilliseconds = pollingTimeoutForCleaningCompletionServiceInMilliseconds;
+ }
+
+ public int getPeriodOfNoProcessedRecordsChangeInMinutes() {
+ return periodOfNoProcessedRecordsChangeInMinutes;
+ }
+
+ public void setPeriodOfNoProcessedRecordsChangeInMinutes(int periodOfNoProcessedRecordsChangeInMinutes) {
+ this.periodOfNoProcessedRecordsChangeInMinutes = periodOfNoProcessedRecordsChangeInMinutes;
+ }
+
+ public int getThreadLimitThrottlingLevelWeak() {
+ return threadLimitThrottlingLevelWeak;
+ }
+
+ public void setThreadLimitThrottlingLevelWeak(int threadLimitThrottlingLevelWeak) {
+ this.threadLimitThrottlingLevelWeak = threadLimitThrottlingLevelWeak;
+ }
+
+ public int getThreadLimitThrottlingLevelMedium() {
+ return threadLimitThrottlingLevelMedium;
+ }
+
+ public void setThreadLimitThrottlingLevelMedium(int threadLimitThrottlingLevelMedium) {
+ this.threadLimitThrottlingLevelMedium = threadLimitThrottlingLevelMedium;
+ }
+
+ public int getThreadLimitThrottlingLevelStrong() {
+ return threadLimitThrottlingLevelStrong;
+ }
+
+ public void setThreadLimitThrottlingLevelStrong(int threadLimitThrottlingLevelStrong) {
+ this.threadLimitThrottlingLevelStrong = threadLimitThrottlingLevelStrong;
+ }
+
+ public String getBaseUrl() {
+ return baseUrl;
+ }
+
+ public void setBaseUrl(String baseUrl) {
+ this.baseUrl = baseUrl;
+ }
+
+ public int getMaxServedExecutionListLength() {
+ return maxServedExecutionListLength;
+ }
+
+ public void setMaxServedExecutionListLength(int maxServedExecutionListLength) {
+ this.maxServedExecutionListLength = maxServedExecutionListLength;
+ }
+
+ public int getMaxDepublishRecordIdsPerDataset() {
+ return maxDepublishRecordIdsPerDataset;
+ }
+
+ public void setMaxDepublishRecordIdsPerDataset(int maxDepublishRecordIdsPerDataset) {
+ this.maxDepublishRecordIdsPerDataset = maxDepublishRecordIdsPerDataset;
+ }
+
+ public int getLinkCheckingDefaultSamplingSize() {
+ return linkCheckingDefaultSamplingSize;
+ }
+
+ public void setLinkCheckingDefaultSamplingSize(int linkCheckingDefaultSamplingSize) {
+ this.linkCheckingDefaultSamplingSize = linkCheckingDefaultSamplingSize;
+ }
+
+ public int getSolrCommitPeriodInMinutes() {
+ return solrCommitPeriodInMinutes;
+ }
+
+ public void setSolrCommitPeriodInMinutes(int solrCommitPeriodInMinutes) {
+ this.solrCommitPeriodInMinutes = solrCommitPeriodInMinutes;
+ }
+
+ public String getAuthenticationBaseUrl() {
+ return authenticationBaseUrl;
+ }
+
+ public void setAuthenticationBaseUrl(String authenticationBaseUrl) {
+ this.authenticationBaseUrl = authenticationBaseUrl;
+ }
+
+ public String[] getAllowedCorsHosts() {
+ return allowedCorsHosts == null ? null : allowedCorsHosts.clone();
+ }
+
+ public void setAllowedCorsHosts(String[] allowedCorsHosts) {
+ this.allowedCorsHosts = allowedCorsHosts == null ? null : allowedCorsHosts.clone();
+ }
+}
diff --git a/metis-core/metis-core-rest/src/main/resources/application.properties.example b/metis-core/metis-core-rest/src/main/resources/application.properties.example
index dc4e7cac82..4aefcc48ef 100644
--- a/metis-core/metis-core-rest/src/main/resources/application.properties.example
+++ b/metis-core/metis-core-rest/src/main/resources/application.properties.example
@@ -1,5 +1,7 @@
#Spring
logging.config=/data/logging/log4j2.xml
+#logging.config=log4j2.xml
+server.error.whitelabel.enabled=false
spring.servlet.multipart.max-file-size=5MB
spring.servlet.multipart.max-request-size=5MB
spring.autoconfigure.exclude=\
@@ -11,111 +13,108 @@ spring.autoconfigure.exclude=\
truststore.path=
truststore.password=
+#Socks Proxy
+socks-proxy.enabled=
+socks-proxy.host=
+socks-proxy.port=
+socks-proxy.username=
+socks-proxy.password=
+
#Orchestration
-max.concurrent.threads=
-dps.monitor.check.interval.in.secs=
-dps.connect.timeout.in.millisecs=
-dps.read.timeout.in.millisecs=
-failsafe.margin.of.inactivity.in.secs=
-periodic.failsafe.check.in.millisecs=
-periodic.scheduler.check.in.millisecs=
-polling.timeout.for.cleaning.completion.service.in.millisecs=
+metis-core.maxConcurrentThreads=1
+metis-core.dpsMonitorCheckIntervalInSeconds=5
+metis-core.dpsConnectTimeoutInMilliseconds=10000
+metis-core.dpsReadTimeoutInMilliseconds=30000
+metis-core.failsafeMarginOfInactivityInSeconds=5
+metis-core.periodicFailsafeCheckInMilliseconds=60000
+metis-core.periodicSchedulerCheckInMilliseconds=90000
+metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds=10000
#If a task passed this cap the task will be cancelled
-period.of.no.processed.records.change.in.minutes=
-thread.limit.throttling.level.weak=
-thread.limit.throttling.level.medium=
-thread.limit.throttling.level.strong=
-
-#Socks Proxy
-socks.proxy.enabled=
-socks.proxy.host=
-socks.proxy.port=
-socks.proxy.username=
-socks.proxy.password=
+metis-core.periodOfNoProcessedRecordsChangeInMinutes=30
+metis-core.threadLimitThrottlingLevelWeak=16
+metis-core.threadLimitThrottlingLevelMedium=8
+metis-core.threadLimitThrottlingLevelStrong=4
+#Use this to specify the FQDN where the application will be hosted under
+metis-core.baseUrl=https://metis-core-rest.test.eanadev.org
+#Use this to specify the maximum execution list length that is served by
+#Metis Core (regardless on whether the list is paginated).
+metis-core.maxServedExecutionListLength=200
+metis-core.maxDepublishRecordIdsPerDataset=1000
+#Use this to specify the default sampling size for Link Checking
+metis-core.linkCheckingDefaultSamplingSize=1000
+#Solr
+metis-core.solrCommitPeriodInMinutes=15
+# Authentication
+metis-core.authenticationBaseUrl=
+# CORS
+metis-core.allowedCorsHosts=
#RabbitMq
rabbitmq.host=
rabbitmq.port=
rabbitmq.username=
rabbitmq.password=
-rabbitmq.virtual.host=
-rabbitmq.queue.name=
-rabbitmq.highest.priority=
-rabbitmq.enableSSL=
-#True if a custom certificate is used in the truststore defined above
-rabbitmq.enable.custom.truststore=
+rabbitmq.virtualHost=/
+rabbitmq.queueName=UserWorkflowExecution
+rabbitmq.highestPriority=10
+rabbitmq.enableSsl=
+rabbitmq.enableCustomTruststore=
#Mongo
mongo.hosts=
-mongo.port=
-mongo.authentication.db=
+mongo.ports=
+mongo.authenticationDatabase=
mongo.username=
mongo.password=
-mongo.enableSSL=
-mongo.db=
-mongo.application.name=
+mongo.enableSsl=
+mongo.database=metis-core
+mongo.applicationName=metis-core-local
#Redis
redis.host=
-redis.port=0
+redis.port=
redis.username=
redis.password=
-redis.enableSSL=
-#True if a custom certificate is used in the truststore defined above
-redis.enable.custom.truststore=
-redisson.connection.pool.size=
-redisson.connect.timeout.in.millisecs=
-redisson.lock.watchdog.timeout.in.secs=
+redis.enableSsl=
+redis.enableCustomTruststore=
+redis.redisson.connectionPoolSize=16
+redis.redisson.connectTimeoutInSeconds=60
+redis.redisson.lockWatchdogTimeoutInSeconds=120
#Setting to -1 disables DNS monitoring
-redisson.dns.monitor.interval.in.millisecs=
-redisson.idle.connection.timeout.in.millisecs=
-redisson.retry.attempts=
-
-solr.commit.period.in.mins=
+redis.redisson.dnsMonitorIntervalInSeconds=60
+redis.redisson.idleConnectionTimeoutInSeconds=60
+redis.redisson.retryAttempts=10
-#ECloud
+# ECloud
ecloud.baseUrl=
-ecloud.dps.baseUrl=
+ecloud.dpsBaseUrl=
ecloud.provider=
ecloud.username=
ecloud.password=
-#Validation
-validation.external.schema.zip=
-validation.external.schema.root=
-validation.external.schematron.root=
-validation.internal.schema.zip=
-validation.internal.schema.root=
-validation.internal.schematron.root=
+#Validation parameters
+validation.validationExternalSchemaZip=
+validation.validationExternalSchemaRoot=
+validation.validationExternalSchematronRoot=
+validation.validationInternalSchemaZip=
+validation.validationInternalSchemaRoot=
+validation.validationInternalSchematronRoot=
-#Authentication
-authentication.baseUrl=
-
-#CORS
-allowed.cors.hosts=*
-
-#Use this to specify the FQDN where the application will be hosted under
-metis.core.baseUrl=
-#Use this to specify the maximum execution list length that is served by
-#Metis Core (regardless on whether the list is paginated).
-metis.core.max.served.execution.list.length=
-metis.core.max.depublish.record.ids.per.dataset=
-metis.link.checking.default.sampling.size=
-
-#Actuator health probes
+#Actuator
management.endpoint.health.probes.enabled=true
management.health.livenessState.enabled=true
management.health.readinessState.enabled=true
#Elastic APM
-elastic.apm.enabled=false
+elastic.apm.enabled=true
elastic.apm.recording=true
elastic.apm.instrument=true
elastic.apm.service_name=metis-core-local
elastic.apm.server_url=
-elastic.apm.environment=test
+elastic.apm.environment=local
elastic.apm.application_packages=eu.europeana
elastic.apm.log_level=ERROR
elastic.apm.capture_body=all
elastic.apm.capture_headers=true
elastic.apm.metrics_interval=5s
+
diff --git a/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt b/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt
index 9e470d9b4e..89f805227b 100644
--- a/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt
+++ b/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt
@@ -39,8 +39,8 @@
+
+-->
@@ -415,7 +415,7 @@
-
+
diff --git a/metis-core/metis-core-service/pom.xml b/metis-core/metis-core-service/pom.xml
index 92e1e87af1..513a28a889 100644
--- a/metis-core/metis-core-service/pom.xml
+++ b/metis-core/metis-core-service/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>metis-core</artifactId>
     <groupId>eu.europeana.metis</groupId>
-    <version>10</version>
+    <version>11</version>
   </parent>
   <artifactId>metis-core-service</artifactId>
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java
index 2e7b47c4de..5a3851c275 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java
@@ -9,13 +9,14 @@
import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptions;
import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN;
+import dev.morphia.UpdateOptions;
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
import dev.morphia.query.Sort;
-import dev.morphia.query.experimental.filters.Filter;
-import dev.morphia.query.experimental.filters.Filters;
-import dev.morphia.query.experimental.updates.UpdateOperator;
-import dev.morphia.query.experimental.updates.UpdateOperators;
+import dev.morphia.query.filters.Filter;
+import dev.morphia.query.filters.Filters;
+import dev.morphia.query.updates.UpdateOperator;
+import dev.morphia.query.updates.UpdateOperators;
import eu.europeana.cloud.mcs.driver.DataSetServiceClient;
import eu.europeana.cloud.service.mcs.exception.DataSetAlreadyExistsException;
import eu.europeana.cloud.service.mcs.exception.MCSException;
@@ -302,7 +303,7 @@ public int findNextInSequenceDatasetId() {
.set("sequence", datasetIdSequence.getSequence());
retryableExternalRequestForNetworkExceptions(
- () -> updateQuery.update(updateOperator).execute());
+ () -> updateQuery.update(new UpdateOptions(), updateOperator));
return datasetIdSequence.getSequence();
}
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java
index e6c650e30b..7da54b6544 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java
@@ -10,7 +10,7 @@
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
import dev.morphia.query.Sort;
-import dev.morphia.query.experimental.filters.Filters;
+import dev.morphia.query.filters.Filters;
import eu.europeana.metis.core.dataset.DatasetXslt;
import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider;
import java.util.Optional;
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java
index 598f7881c4..b3c44865a3 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java
@@ -7,9 +7,9 @@
import dev.morphia.UpdateOptions;
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
-import dev.morphia.query.experimental.filters.Filters;
-import dev.morphia.query.experimental.updates.UpdateOperator;
-import dev.morphia.query.experimental.updates.UpdateOperators;
+import dev.morphia.query.filters.Filters;
+import dev.morphia.query.updates.UpdateOperator;
+import dev.morphia.query.updates.UpdateOperators;
import eu.europeana.metis.core.dataset.DepublishRecordId;
import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus;
import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider;
@@ -76,7 +76,7 @@ private Set getNonExistingRecordIds(String datasetId, Set record
// Create query for existing records in list. Only return record IDs.
    final Query<DepublishRecordId> query = morphiaDatastoreProvider.getDatastore()
- .find(DepublishRecordId.class);
+ .find(DepublishRecordId.class);
query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId));
query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIds));
@@ -86,27 +86,25 @@ private Set getNonExistingRecordIds(String datasetId, Set record
findOptions.projection().exclude(DepublishRecordId.ID_FIELD);
    final Set<String> existing;
existing = getListOfQueryRetryable(query, findOptions).stream()
- .map(DepublishRecordId::getRecordId).collect(Collectors.toSet());
+ .map(DepublishRecordId::getRecordId).collect(Collectors.toSet());
// Return the other ones: the record IDs not found in the database.
return recordIds.stream().filter(recordId -> !existing.contains(recordId))
- .collect(Collectors.toSet());
+ .collect(Collectors.toSet());
});
}
/**
- * Add depublished records to persistence. This method checks whether the depublished record
- * already exists, and if so, doesn't add it again. All new records (but not the existing ones)
- * will have the default depublication status ({@link DepublicationStatus#PENDING_DEPUBLICATION})
- * and no depublication date.
+ * Add depublished records to persistence. This method checks whether the depublished record already exists, and if so, doesn't
+ * add it again. All new records (but not the existing ones) will have the default depublication status
+ * ({@link DepublicationStatus#PENDING_DEPUBLICATION}) and no depublication date.
*
* @param datasetId The dataset to which the records belong.
* @param candidateRecordIds The IDs of the depublish record ids to add.
- * @return How many of the passed records were in fact added. This counter is not thread-safe: if
- * multiple threads try to add the same records, their combined counters may overrepresent the
- * number of records that were actually added.
- * @throws BadContentException In case adding the records would violate the maximum number of
- * depublished records that each dataset can have.
+ * @return How many of the passed records were in fact added. This counter is not thread-safe: if multiple threads try to add
+ * the same records, their combined counters may overrepresent the number of records that were actually added.
+ * @throws BadContentException In case adding the records would violate the maximum number of depublished records that each
+ * dataset can have.
*/
  public int createRecordIdsToBeDepublished(String datasetId, Set<String> candidateRecordIds)
throws BadContentException {
@@ -150,14 +148,14 @@ void addRecords(Set<String> recordIdsToAdd, String datasetId,
}
/**
- * Deletes a list of record ids from the database. Only record ids that are in a {@link
- * DepublicationStatus#PENDING_DEPUBLICATION} state will be removed.
+ * Deletes a list of record ids from the database. Only record ids that are in a
+ * {@link DepublicationStatus#PENDING_DEPUBLICATION} state will be removed.
*
* @param datasetId The dataset to which the depublish record ids belong.
* @param recordIds The depublish record ids to be removed
* @return The number or record ids that were removed.
- * @throws BadContentException In case adding the records would violate the maximum number of
- * depublished records that each dataset can have.
+ * @throws BadContentException In case adding the records would violate the maximum number of depublished records that each
+ * dataset can have.
*/
  public Long deletePendingRecordIds(String datasetId, Set<String> recordIds)
throws BadContentException {
@@ -169,7 +167,7 @@ public Long deletePendingRecordIds(String datasetId, Set<String> recordIds)
}
    final Query<DepublishRecordId> query = morphiaDatastoreProvider.getDatastore()
- .find(DepublishRecordId.class);
+ .find(DepublishRecordId.class);
query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId));
query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIds));
query.filter(Filters.eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD,
@@ -187,12 +185,11 @@ public Long deletePendingRecordIds(String datasetId, Set<String> recordIds)
long countDepublishRecordIdsForDataset(String datasetId) {
return retryableExternalRequestForNetworkExceptions(
() -> morphiaDatastoreProvider.getDatastore().find(DepublishRecordId.class)
- .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).count());
+ .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).count());
}
/**
- * Counts how many records we have for a given dataset that have the status {@link
- * DepublicationStatus#DEPUBLISHED}.
+ * Counts how many records we have for a given dataset that have the status {@link DepublicationStatus#DEPUBLISHED}.
*
* @param datasetId The ID of the dataset to count for.
* @return The number of records.
@@ -200,9 +197,9 @@ long countDepublishRecordIdsForDataset(String datasetId) {
public long countSuccessfullyDepublishedRecordIdsForDataset(String datasetId) {
return retryableExternalRequestForNetworkExceptions(
() -> morphiaDatastoreProvider.getDatastore().find(DepublishRecordId.class)
- .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).filter(Filters
+ .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).filter(Filters
.eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, DepublicationStatus.DEPUBLISHED))
- .count());
+ .count());
}
/**
@@ -236,16 +233,16 @@ public List getDepublishRecordIds(String datasetId, int p
/**
* Get all depublished records for a given dataset.
   * <p>This method is to be used with caution since it doesn't have a limit on the returned items.
- * It is mainly used to minimize, internal to the application, database requests. Ids are returned
- * based on the provided status filter parameter
+ * It is mainly used to minimize, internal to the application, database requests. Ids are returned based on the provided status
+ * filter parameter
*
* @param datasetId The dataset for which to retrieve the records. Cannot be null.
* @param sortField The sorting field. Cannot be null.
* @param sortDirection The sorting direction. Cannot be null.
* @param depublicationStatus The depublication status of the records. Can be null.
* @return A (possibly empty) list of depublish record ids.
- * @throws BadContentException In case the records would violate the maximum number of depublished
- * records that each dataset can have.
+ * @throws BadContentException In case the records would violate the maximum number of depublished records that each dataset can
+ * have.
*/
public Set<String> getAllDepublishRecordIdsWithStatus(String datasetId,
DepublishRecordIdSortField sortField, SortDirection sortDirection,
@@ -258,8 +255,8 @@ public Set<String> getAllDepublishRecordIdsWithStatus(String datasetId,
/**
* Get all depublished records for a given dataset.
*
* <p>This method is to be used with caution since it doesn't have a limit on the returned items.
- * It is mainly used to minimize, internal to the application, database requests. Ids are returned
- * based on the provided status filter parameter
+ * It is mainly used internally by the application to minimize database requests. Ids are returned based on the provided
+ * status filter parameter.
*
* @param datasetId The dataset for which to retrieve the records. Cannot be null.
* @param sortField The sorting field. Cannot be null.
@@ -267,8 +264,8 @@ public Set<String> getAllDepublishRecordIdsWithStatus(String datasetId,
* @param depublicationStatus The depublication status of the records. Can be null.
* @param recordIds The record ids provided, that are to be checked upon. Can be null/empty
* @return A (possibly empty) list of depublish record ids.
- * @throws BadContentException In case the records would violate the maximum number of depublished
- * records that each dataset can have.
+ * @throws BadContentException In case the records would violate the maximum number of depublished records that each dataset can
+ * have.
*/
public Set<String> getAllDepublishRecordIdsWithStatus(String datasetId,
DepublishRecordIdSortField sortField, SortDirection sortDirection,
@@ -302,7 +299,7 @@ private Query<DepublishRecordId> prepareQueryForDepublishRecordIds(String datase
DepublicationStatus depublicationStatus, String searchQuery) {
// Create query.
final Query<DepublishRecordId> query = morphiaDatastoreProvider.getDatastore()
- .find(DepublishRecordId.class);
+ .find(DepublishRecordId.class);
query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId));
if (Objects.nonNull(depublicationStatus)) {
query.filter(Filters.eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, depublicationStatus));
@@ -316,16 +313,14 @@ private Query<DepublishRecordId> prepareQueryForDepublishRecordIds(String datase
}
/**
- * This method marks record ids with the provided {@link DepublicationStatus} and {@link Date}
- * where appropriate.
+ * This method marks record ids with the provided {@link DepublicationStatus} and {@link Date} where appropriate.
*
* <p>A {@link DepublicationStatus#PENDING_DEPUBLICATION} unsets the depublication date
*
* <p>A {@link DepublicationStatus#DEPUBLISHED} sets the depublication date with the one
* provided
*
* @param datasetId the dataset for which to do this. Cannot be null
- * @param recordIds the records for which to set this. Can be null or empty, in which case the
- * operation will be performed on all records. If it is not empty, a new record will be created if
- * a record with the given record ID is not already present.
+ * @param recordIds the records for which to set this. Can be null or empty, in which case the operation will be performed on
+ * all records. If it is not empty, a new record will be created if a record with the given record ID is not already present.
* @param depublicationStatus the depublication status. Cannot be null
* @param depublicationDate the depublication date. Can be null only if depublicationStatus is
* {@link DepublicationStatus#PENDING_DEPUBLICATION}
@@ -353,8 +348,9 @@ public void markRecordIdsWithDepublicationStatus(String datasetId, Set r
// Add the records that are missing.
final Set<String> recordIdsToAdd = getNonExistingRecordIds(datasetId, recordIds);
final Instant depublicationInstant = Optional.ofNullable(depublicationDate)
- .filter(date -> depublicationStatus != DepublicationStatus.PENDING_DEPUBLICATION)
- .map(Date::toInstant).orElse(null);
+ .filter(
+ date -> depublicationStatus != DepublicationStatus.PENDING_DEPUBLICATION)
+ .map(Date::toInstant).orElse(null);
addRecords(recordIdsToAdd, datasetId, depublicationStatus, depublicationInstant);
// Compute the records to update - if there are none, we're done.
@@ -367,27 +363,29 @@ public void markRecordIdsWithDepublicationStatus(String datasetId, Set r
// Create query.
final Query<DepublishRecordId> query = morphiaDatastoreProvider.getDatastore()
- .find(DepublishRecordId.class);
+ .find(DepublishRecordId.class);
query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId));
if (recordIdsToUpdate != null) {
query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIdsToUpdate));
}
// Define the update operations.
- final UpdateOperator firstUpdateOperator = UpdateOperators
- .set(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, depublicationStatus);
- final ArrayList<UpdateOperator> extraUpdateOperators = new ArrayList<>();
+ final ArrayList<UpdateOperator> updateOperators = new ArrayList<>();
+ updateOperators.add(UpdateOperators
+ .set(DepublishRecordId.DEPUBLICATION_STATUS_FIELD,
+ depublicationStatus));
if (depublicationStatus == DepublicationStatus.PENDING_DEPUBLICATION) {
- extraUpdateOperators.add(UpdateOperators.unset(DepublishRecordId.DEPUBLICATION_DATE_FIELD));
+ updateOperators.add(UpdateOperators.unset(DepublishRecordId.DEPUBLICATION_DATE_FIELD));
} else {
- extraUpdateOperators
- .add(UpdateOperators.set(DepublishRecordId.DEPUBLICATION_DATE_FIELD, depublicationDate));
+ updateOperators.add(
+ UpdateOperators.set(DepublishRecordId.DEPUBLICATION_DATE_FIELD,
+ depublicationDate == null ? Date.from(Instant.now()) : depublicationDate)
+ );
}
// Apply the operations.
retryableExternalRequestForNetworkExceptions(
- () -> query.update(firstUpdateOperator, extraUpdateOperators.toArray(UpdateOperator[]::new))
- .execute(new UpdateOptions().multi(true)));
+ () -> query.update(new UpdateOptions().multi(true), updateOperators.toArray(UpdateOperator[]::new)));
}
/**
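The hunk above shows this PR's central Morphia change: query.update(...) now takes the UpdateOptions first and the operators as varargs, and executes immediately, replacing the old update(operator, ...).execute(options) chain. A minimal sketch of the new call shape, assuming the DepublishRecordId mapping above and an available Datastore (imports for the Metis classes omitted; names of the sketch class and method are illustrative):

import dev.morphia.Datastore;
import dev.morphia.UpdateOptions;
import dev.morphia.query.Query;
import dev.morphia.query.filters.Filters;
import dev.morphia.query.updates.UpdateOperator;
import dev.morphia.query.updates.UpdateOperators;
import java.util.ArrayList;
import java.util.List;

class UpdateCallShapeSketch {

  // Marks all records of a dataset as pending depublication, mirroring the hunk above.
  static void markPending(Datastore datastore, String datasetId) {
    final Query<DepublishRecordId> query = datastore.find(DepublishRecordId.class)
        .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId));
    final List<UpdateOperator> operators = new ArrayList<>();
    operators.add(UpdateOperators.set(DepublishRecordId.DEPUBLICATION_STATUS_FIELD,
        DepublicationStatus.PENDING_DEPUBLICATION));
    operators.add(UpdateOperators.unset(DepublishRecordId.DEPUBLICATION_DATE_FIELD));
    // Options come first and the update runs at once; there is no trailing execute() anymore.
    query.update(new UpdateOptions().multi(true), operators.toArray(UpdateOperator[]::new));
  }
}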
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java
index 5ce57785cd..59f7bd8c58 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java
@@ -9,8 +9,8 @@
import dev.morphia.DeleteOptions;
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
-import dev.morphia.query.experimental.filters.Filter;
-import dev.morphia.query.experimental.filters.Filters;
+import dev.morphia.query.filters.Filter;
+import dev.morphia.query.filters.Filters;
import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider;
import eu.europeana.metis.core.rest.RequestLimits;
import eu.europeana.metis.core.workflow.ScheduleFrequence;
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java
index b57ade0a98..9da2e1e619 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java
@@ -6,7 +6,7 @@
import com.mongodb.client.result.DeleteResult;
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
-import dev.morphia.query.experimental.filters.Filters;
+import dev.morphia.query.filters.Filters;
import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider;
import eu.europeana.metis.core.workflow.Workflow;
import eu.europeana.metis.network.ExternalRequestUtil;
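The import hunks in this file and the previous one are the other half of the Morphia upgrade: the former dev.morphia.query.experimental.* and dev.morphia.aggregation.experimental.* packages were promoted to stable names, so only the imports change while the call sites keep compiling. A minimal sketch under that assumption (the "datasetId" field name is illustrative, not taken from the entity mapping):

// Before the upgrade this import was dev.morphia.query.experimental.filters.Filters.
import dev.morphia.Datastore;
import dev.morphia.query.filters.Filters;

class StableFilterImportSketch {

  // Counts the workflows of one dataset; the Filters API itself is unchanged, only its package moved.
  long countForDataset(Datastore datastore, String datasetId) {
    return datastore.find(Workflow.class)
        .filter(Filters.eq("datasetId", datasetId))
        .count();
  }
}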
diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java
index 3d7811139d..d5f00d761c 100644
--- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java
+++ b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java
@@ -16,25 +16,26 @@
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import dev.morphia.DeleteOptions;
-import dev.morphia.aggregation.experimental.Aggregation;
-import dev.morphia.aggregation.experimental.expressions.ArrayExpressions;
-import dev.morphia.aggregation.experimental.expressions.ComparisonExpressions;
-import dev.morphia.aggregation.experimental.expressions.ConditionalExpressions;
-import dev.morphia.aggregation.experimental.expressions.Expressions;
-import dev.morphia.aggregation.experimental.expressions.MathExpressions;
-import dev.morphia.aggregation.experimental.expressions.impls.Expression;
-import dev.morphia.aggregation.experimental.expressions.impls.MathExpression;
-import dev.morphia.aggregation.experimental.stages.Lookup;
-import dev.morphia.aggregation.experimental.stages.Projection;
-import dev.morphia.aggregation.experimental.stages.Sort;
-import dev.morphia.aggregation.experimental.stages.Unwind;
+import dev.morphia.UpdateOptions;
+import dev.morphia.aggregation.Aggregation;
+import dev.morphia.aggregation.expressions.ArrayExpressions;
+import dev.morphia.aggregation.expressions.ComparisonExpressions;
+import dev.morphia.aggregation.expressions.ConditionalExpressions;
+import dev.morphia.aggregation.expressions.Expressions;
+import dev.morphia.aggregation.expressions.MathExpressions;
+import dev.morphia.aggregation.expressions.impls.Expression;
+import dev.morphia.aggregation.expressions.impls.MathExpression;
+import dev.morphia.aggregation.stages.Lookup;
+import dev.morphia.aggregation.stages.Projection;
+import dev.morphia.aggregation.stages.Sort;
+import dev.morphia.aggregation.stages.Unwind;
import dev.morphia.annotations.Entity;
import dev.morphia.query.FindOptions;
import dev.morphia.query.Query;
-import dev.morphia.query.experimental.filters.Filter;
-import dev.morphia.query.experimental.filters.Filters;
-import dev.morphia.query.experimental.updates.UpdateOperator;
-import dev.morphia.query.experimental.updates.UpdateOperators;
+import dev.morphia.query.filters.Filter;
+import dev.morphia.query.filters.Filters;
+import dev.morphia.query.updates.UpdateOperator;
+import dev.morphia.query.updates.UpdateOperators;
import eu.europeana.metis.authentication.user.MetisUserView;
import eu.europeana.metis.core.common.DaoFieldNames;
import eu.europeana.metis.core.dataset.Dataset;
@@ -99,7 +100,7 @@ public WorkflowExecutionDao(MorphiaDatastoreProvider morphiaDatastoreProvider) {
@Override
public WorkflowExecution create(WorkflowExecution workflowExecution) {
final ObjectId objectId = Optional.ofNullable(workflowExecution.getId())
- .orElseGet(ObjectId::new);
+ .orElseGet(ObjectId::new);
workflowExecution.setId(objectId);
final WorkflowExecution workflowExecutionSaved = retryableExternalRequestForNetworkExceptions(
() -> morphiaDatastoreProvider.getDatastore().save(workflowExecution));
@@ -125,14 +126,14 @@ public String update(WorkflowExecution workflowExecution) {
*/
public void updateWorkflowPlugins(WorkflowExecution workflowExecution) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
+ .find(WorkflowExecution.class)
+ .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
final UpdateOperator updateOperator = UpdateOperators
.set(METIS_PLUGINS.getFieldName(), workflowExecution.getMetisPlugins());
UpdateResult updateResult = retryableExternalRequestForNetworkExceptions(
- () -> query.update(updateOperator).execute());
+ () -> query.update(new UpdateOptions(), updateOperator));
LOGGER.debug(
"WorkflowExecution metisPlugins for datasetId '{}' updated in Mongo. (UpdateResults: {})",
workflowExecution.getDatasetId(),
@@ -140,32 +141,31 @@ public void updateWorkflowPlugins(WorkflowExecution workflowExecution) {
}
/**
- * Overwrites only the portion of the WorkflowExecution that contains the monitor
- * information(plugins, started date, updated date).
+ * Overwrites only the portion of the WorkflowExecution that contains the monitor information (plugins, started date, updated
+ * date).
*
* @param workflowExecution the WorkflowExecution to update
*/
public void updateMonitorInformation(WorkflowExecution workflowExecution) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
- final UpdateOperator firstUpdateOperator = UpdateOperators
- .set(WORKFLOW_STATUS.getFieldName(), workflowExecution.getWorkflowStatus());
- final ArrayList<UpdateOperator> extraUpdateOperators = new ArrayList<>();
+ .find(WorkflowExecution.class)
+ .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
+ final ArrayList<UpdateOperator> updateOperators = new ArrayList<>();
+ updateOperators.add(UpdateOperators
+ .set(WORKFLOW_STATUS.getFieldName(),
+ workflowExecution.getWorkflowStatus()));
if (workflowExecution.getStartedDate() != null) {
- extraUpdateOperators
+ updateOperators
.add(UpdateOperators.set("startedDate", workflowExecution.getStartedDate()));
}
if (workflowExecution.getUpdatedDate() != null) {
- extraUpdateOperators
+ updateOperators
.add(UpdateOperators.set("updatedDate", workflowExecution.getUpdatedDate()));
}
- extraUpdateOperators.add(
+ updateOperators.add(
UpdateOperators.set(METIS_PLUGINS.getFieldName(), workflowExecution.getMetisPlugins()));
UpdateResult updateResult = retryableExternalRequestForNetworkExceptions(
- () -> query
- .update(firstUpdateOperator, extraUpdateOperators.toArray(UpdateOperator[]::new))
- .execute());
+ () -> query.update(new UpdateOptions(), updateOperators.toArray(UpdateOperator[]::new)));
LOGGER.debug(
"WorkflowExecution monitor information for datasetId '{}' updated in Mongo. (UpdateResults: {})",
workflowExecution.getDatasetId(),
@@ -175,16 +175,16 @@ public void updateMonitorInformation(WorkflowExecution workflowExecution) {
/**
* Set the cancelling field in the database.
*
* <p>Also adds information of the user identifier that cancelled the execution or if it was by a
- * system operation, using {@link SystemId} values as identifiers. For historical executions the
- * value of the cancelledBy field will remain null
+ * system operation, using {@link SystemId} values as identifiers. For historical executions the value of the
+ * cancelledBy field will remain null
*
* @param workflowExecution the workflowExecution to be cancelled
* @param metisUserView the user that triggered the cancellation or null if it was the system
*/
public void setCancellingState(WorkflowExecution workflowExecution, MetisUserView metisUserView) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
+ .find(WorkflowExecution.class)
+ .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId()));
String cancelledBy;
if (metisUserView == null || metisUserView.getUserId() == null) {
cancelledBy = SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name();
@@ -195,7 +195,7 @@ public void setCancellingState(WorkflowExecution workflowExecution, MetisUserVie
final UpdateOperator setCancelledByOperator = UpdateOperators.set("cancelledBy", cancelledBy);
UpdateResult updateResult = retryableExternalRequestForNetworkExceptions(
- () -> query.update(setCancellingOperator, setCancelledByOperator).execute());
+ () -> query.update(new UpdateOptions(), setCancellingOperator, setCancelledByOperator));
LOGGER.debug(
"WorkflowExecution cancelling for datasetId '{}' set to true in Mongo. (UpdateResults: {})",
workflowExecution.getDatasetId(),
@@ -205,8 +205,8 @@ public void setCancellingState(WorkflowExecution workflowExecution, MetisUserVie
@Override
public WorkflowExecution getById(String id) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), new ObjectId(id)));
+ .find(WorkflowExecution.class)
+ .filter(Filters.eq(ID.getFieldName(), new ObjectId(id)));
return retryableExternalRequestForNetworkExceptions(query::first);
}
@@ -216,8 +216,7 @@ public boolean delete(WorkflowExecution workflowExecution) {
}
/**
- * Get the WorkflowExecution for a dataset identifier that is {@link WorkflowStatus#INQUEUE} or
- * {@link WorkflowStatus#RUNNING}
+ * Get the WorkflowExecution for a dataset identifier that is {@link WorkflowStatus#INQUEUE} or {@link WorkflowStatus#RUNNING}
*
* @param datasetId the dataset identifier
* @return the WorkflowExecution if found
@@ -236,13 +235,12 @@ public WorkflowExecution getRunningOrInQueueExecution(String datasetId) {
public boolean exists(WorkflowExecution workflowExecution) {
return retryableExternalRequestForNetworkExceptions(
() -> morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class)
- .filter(Filters.eq(DATASET_ID.getFieldName(), workflowExecution.getDatasetId()))
- .first(new FindOptions().projection().include(ID.getFieldName()))) != null;
+ .filter(Filters.eq(DATASET_ID.getFieldName(), workflowExecution.getDatasetId()))
+ .first(new FindOptions().projection().include(ID.getFieldName()))) != null;
}
/**
- * Check if a WorkflowExecution exists for a dataset identifier and has not completed it's
- * execution.
+ * Check if a WorkflowExecution exists for a dataset identifier and has not completed its execution.
*
* @param datasetId the dataset identifier
* @return the identifier of the execution if found, otherwise null
@@ -264,7 +262,7 @@ public String existsAndNotCompleted(String datasetId) {
private Query<WorkflowExecution> runningOrInqueueQuery(String datasetId) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class);
+ .find(WorkflowExecution.class);
final Filter datasetIdFilter = Filters.eq(DATASET_ID.getFieldName(), datasetId);
final Filter workflowStatusFilter = Filters
@@ -276,42 +274,36 @@ private Query runningOrInqueueQuery(String datasetId) {
}
/**
- * Get the first successful Plugin of a WorkflowExecution for a dataset identifier and a set of
- * plugin types
+ * Get the first successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types
*
- * @param datasetId the dataset identifier
- * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null
- * values.
+ * @param datasetId the dataset identifier
+ * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values.
* @return the first plugin found
*/
public PluginWithExecutionId<MetisPlugin> getFirstSuccessfulPlugin(String datasetId,
Set<PluginType> pluginTypes) {
return Optional.ofNullable(getFirstOrLastFinishedPlugin(datasetId, pluginTypes, true))
- .orElse(null);
+ .orElse(null);
}
/**
- * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of
- * plugin types
+ * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types
*
- * @param datasetId the dataset identifier
- * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null
- * values.
+ * @param datasetId the dataset identifier
+ * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values.
* @return the last plugin found
*/
public PluginWithExecutionId<MetisPlugin> getLatestSuccessfulPlugin(String datasetId,
Set<PluginType> pluginTypes) {
return Optional.ofNullable(getFirstOrLastFinishedPlugin(datasetId, pluginTypes, false))
- .orElse(null);
+ .orElse(null);
}
/**
- * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of
- * plugin types
+ * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types
*
- * @param datasetId the dataset identifier
- * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null
- * values.
+ * @param datasetId the dataset identifier
+ * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values.
* @param limitToValidData Only return the result if it has valid data (see {@link DataStatus}).
* @return the last plugin found
*/
@@ -324,12 +316,12 @@ public PluginWithExecutionId getLatestSuccessfulExecutablePlug
// Perform the database query. If nothing found, we are done.
final Set<PluginType> convertedPluginTypes = pluginTypes.stream()
- .map(ExecutablePluginType::toPluginType).collect(Collectors.toSet());
+ .map(ExecutablePluginType::toPluginType).collect(Collectors.toSet());
final PluginWithExecutionId<MetisPlugin> uncastResultWrapper =
getFirstOrLastFinishedPlugin(datasetId, convertedPluginTypes, false);
final MetisPlugin uncastResult = Optional.ofNullable(uncastResultWrapper)
- .map(PluginWithExecutionId::getPlugin).orElse(null);
- if (uncastResult == null) {
+ .map(PluginWithExecutionId::getPlugin).orElse(null);
+ if (uncastResultWrapper == null || uncastResult == null) {
return null;
}
@@ -378,15 +370,15 @@ PluginWithExecutionId getFirstOrLastFinishedPlugin(String datasetId
// Query: unwind and match again so that we know that all conditions apply to the same plugin.
final Aggregation<WorkflowExecution> aggregation = morphiaDatastoreProvider.getDatastore()
- .aggregate(WorkflowExecution.class);
+ .aggregate(WorkflowExecution.class);
final String orderField =
METIS_PLUGINS.getFieldName() + "." + FINISHED_DATE.getFieldName();
aggregation.match(collectedFilters)
- .unwind(Unwind.on(METIS_PLUGINS.getFieldName()))
- .match(collectedFilters)
- .sort(firstFinished ? Sort.on().ascending(orderField) : Sort.on().descending(orderField))
- .limit(1);
+ .unwind(Unwind.unwind(METIS_PLUGINS.getFieldName()))
+ .match(collectedFilters)
+ .sort(firstFinished ? Sort.sort().ascending(orderField) : Sort.sort().descending(orderField))
+ .limit(1);
final List<WorkflowExecution> metisPluginsIterator = MorphiaUtils
.getListOfAggregationRetryable(aggregation,
@@ -394,9 +386,9 @@ PluginWithExecutionId getFirstOrLastFinishedPlugin(String datasetId
// Because of the unwind, we know that the plugin we need is always the first one.
return Optional.ofNullable(metisPluginsIterator).stream().flatMap(Collection::stream)
- .filter(execution -> !execution.getMetisPlugins().isEmpty())
- .map(execution -> new PluginWithExecutionId<>(execution,
- execution.getMetisPlugins().get(0))).findFirst().orElse(null);
+ .filter(execution -> !execution.getMetisPlugins().isEmpty())
+ .map(execution -> new PluginWithExecutionId<>(execution,
+ execution.getMetisPlugins().get(0))).findFirst().orElse(null);
}
private void verifyEnumSetIsValidAndNotEmpty(Set<? extends Enum<?>> set) {
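Besides the package moves, the aggregation builders in these hunks lose their generic on() factories in favour of named ones: Unwind.unwind(...), Sort.sort(), Projection.project() and Lookup.lookup(...). A minimal sketch of the pipeline that getFirstOrLastFinishedPlugin builds above, assuming an available Datastore (the "metisPlugins" field name is taken from the surrounding code):

import dev.morphia.Datastore;
import dev.morphia.aggregation.Aggregation;
import dev.morphia.aggregation.stages.Sort;
import dev.morphia.aggregation.stages.Unwind;
import dev.morphia.query.filters.Filter;

class AggregationBuilderSketch {

  // Unwinds the plugin list, re-applies the filters and keeps only the earliest finished plugin.
  Aggregation<WorkflowExecution> firstFinishedPlugin(Datastore datastore, Filter[] filters,
      String orderField) {
    return datastore.aggregate(WorkflowExecution.class)
        .match(filters)
        .unwind(Unwind.unwind("metisPlugins"))   // was Unwind.on("metisPlugins")
        .match(filters)
        .sort(Sort.sort().ascending(orderField)) // was Sort.on().ascending(orderField)
        .limit(1);
  }
}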
@@ -408,18 +400,14 @@ private void verifyEnumSetIsValidAndNotEmpty(Set<? extends Enum<?>> set) {
/**
* Get all WorkflowExecutions paged.
*
- * @param datasetIds a set of dataset identifiers to filter, can be empty or
- * null to get all
- * @param workflowStatuses a set of workflow statuses to filter, can be empty or
- * null
- * @param orderField the field to be used to sort the results
- * @param ascending a boolean value to request the ordering to ascending or
- * descending
- * @param nextPage The first page to be served (zero-based)
- * @param pageCount How many pages are requested - can be null
- * @param ignoreMaxServedExecutionsLimit whether this method is to apply the limit on the number
- * of executions are served. Be careful when setting this to
- * true.
+ * @param datasetIds a set of dataset identifiers to filter, can be empty or null to get all
+ * @param workflowStatuses a set of workflow statuses to filter, can be empty or null
+ * @param orderField the field to be used to sort the results
+ * @param ascending a boolean value to request the ordering to ascending or descending
+ * @param nextPage The first page to be served (zero-based)
+ * @param pageCount How many pages are requested - can be null
+ * @param ignoreMaxServedExecutionsLimit whether this method should apply the limit on the number of executions that are
+ * served. Be careful when setting this to true.
* @return a list of all the WorkflowExecutions found
*/
public ResultList<WorkflowExecution> getAllWorkflowExecutions(Set<String> datasetIds,
@@ -446,7 +434,7 @@ public ResultList<WorkflowExecution> getAllWorkflowExecutions(Set<String> datase
// Execute query with correct pagination
final FindOptions findOptions = new FindOptions().skip(pagination.getSkip())
- .limit(pagination.getLimit());
+ .limit(pagination.getLimit());
// Set ordering
if (orderField != null) {
@@ -462,38 +450,37 @@ public ResultList getAllWorkflowExecutions(Set datase
}
/**
- * Get an overview of all WorkflowExecutions. This returns a list of executions ordered to display
- * an overview. First the ones in queue, then those in progress and then those that are finalized.
- * Within these categories they will be sorted by creation date (most recent first). This method
- * does support pagination.
- *
+ * Get an overview of all WorkflowExecutions. This returns a list of executions ordered to display an overview. First the ones
+ * in queue, then those in progress and then those that are finalized. Within these categories they will be sorted by creation
+ * date (most recent first). This method does support pagination.
+ *
* TODO when we migrate
* to mongo 3.4 or later, we can do this easier with new aggregation pipeline stages and
* operators. The main improvements are 1) to try to map the root to the 'execution' variable so
* that we don't have to look it up afterwards, and 2) to use $addFields with $switch to add the
* statusIndex instead of having to go through creating and subtracting the two temporary fields.
*
- * @param datasetIds a set of dataset identifiers to filter, can be empty or null to get all
+ * @param datasetIds a set of dataset identifiers to filter, can be empty or null to get all
* @param pluginStatuses the plugin statuses to filter. Can be null.
- * @param pluginTypes the plugin types to filter. Can be null.
- * @param fromDate the date from where the results should start. Can be null.
- * @param toDate the date to where the results should end. Can be null.
- * @param nextPage the nextPage token
- * @param pageCount the number of pages that are requested
+ * @param pluginTypes the plugin types to filter. Can be null.
+ * @param fromDate the date from where the results should start. Can be null.
+ * @param toDate the date to where the results should end. Can be null.
+ * @param nextPage the nextPage token
+ * @param pageCount the number of pages that are requested
* @return a list of all the WorkflowExecutions found. Is not null.
*/
public ResultList<ExecutionDatasetPair> getWorkflowExecutionsOverview(Set<String> datasetIds,
Set<PluginStatus> pluginStatuses, Set<PluginType> pluginTypes, Date fromDate, Date toDate,
int nextPage, int pageCount) {
return getWorkflowExecutionsOverview(datasetIds, pluginStatuses, pluginTypes, fromDate, toDate,
- createPagination(nextPage, pageCount, false));
+ createPagination(nextPage, pageCount, false));
}
ResultList<ExecutionDatasetPair> getWorkflowExecutionsOverview(Set<String> datasetIds,
Set<PluginStatus> pluginStatuses, Set<PluginType> pluginTypes, Date fromDate, Date toDate,
Pagination pagination) {
- return retryableExternalRequestForNetworkExceptions(() -> {
+ return retryableExternalRequestForNetworkExceptions(() -> {
// Prepare pagination and check that there is something to query
if (pagination.getLimit() < 1) {
@@ -502,7 +489,7 @@ ResultList getWorkflowExecutionsOverview(Set datas
// Create the aggregate pipeline
final Aggregation<WorkflowExecution> aggregation = morphiaDatastoreProvider.getDatastore()
- .aggregate(WorkflowExecution.class);
+ .aggregate(WorkflowExecution.class);
// Step 1: create filter to match
final Filter filter = createFilter(datasetIds, pluginStatuses, pluginTypes, fromDate, toDate);
@@ -513,7 +500,7 @@ ResultList getWorkflowExecutionsOverview(Set datas
// Step 3: Sort - first on the status index, then on the createdDate.
aggregation
- .sort(Sort.on().ascending(statusIndexField).descending(CREATED_DATE.getFieldName()));
+ .sort(Sort.sort().ascending(statusIndexField).descending(CREATED_DATE.getFieldName()));
// Step 4: Apply pagination
aggregation.skip(pagination.getSkip()).limit(pagination.getLimit());
@@ -574,11 +561,11 @@ private String determineOrderingStatusIndex(Aggregation aggre
.condition(runningCheckExpression, Expressions.value(RUNNING_POSITION_IN_OVERVIEW),
Expressions.value(0));
- aggregation.project(Projection.of()
- .include(statusInQueueField, inqueueConditionExpression)
- .include(statusRunningField, runningConditionExpression)
- .include(CREATED_DATE.getFieldName())
- .include(DATASET_ID.getFieldName()));
+ aggregation.project(Projection.project()
+ .include(statusInQueueField, inqueueConditionExpression)
+ .include(statusRunningField, runningConditionExpression)
+ .include(CREATED_DATE.getFieldName())
+ .include(DATASET_ID.getFieldName()));
// Step 2: Copy specific positions to final variable: use default position if no position is set.
final String statusIndexField = "statusIndex";
@@ -591,10 +578,10 @@ private String determineOrderingStatusIndex(Aggregation aggre
.condition(sumCheckExpression, Expressions.value(DEFAULT_POSITION_IN_OVERVIEW),
sumExpression);
- aggregation.project(Projection.of()
- .include(statusIndexField, statusIndexExpression)
- .include(CREATED_DATE.getFieldName())
- .include(DATASET_ID.getFieldName()));
+ aggregation.project(Projection.project()
+ .include(statusIndexField, statusIndexExpression)
+ .include(CREATED_DATE.getFieldName())
+ .include(DATASET_ID.getFieldName()));
return statusIndexField;
}
@@ -603,26 +590,27 @@ private void joinDatasetAndWorkflowExecution(Aggregation aggr
// Step 1: Join with the dataset and the execution
final String datasetListField = "datasetList";
final String executionListField = "executionList";
- aggregation.lookup(Lookup.from(Dataset.class).localField(DATASET_ID.getFieldName())
- .foreignField(DATASET_ID.getFieldName()).as(datasetListField));
- aggregation.lookup(Lookup.from(WorkflowExecution.class).localField(ID.getFieldName())
- .foreignField(ID.getFieldName()).as(executionListField));
+ aggregation.lookup(Lookup.lookup(Dataset.class).localField(DATASET_ID.getFieldName())
+ .foreignField(DATASET_ID.getFieldName()).as(datasetListField));
+ aggregation.lookup(Lookup.lookup(WorkflowExecution.class).localField(ID.getFieldName())
+ .foreignField(ID.getFieldName()).as(executionListField));
// Step 2: Keep only the first entry in the dataset and execution lists.
final String datasetField = "dataset";
final String executionField = "execution";
- final Projection projection = Projection.of()
- .include(datasetField,
- ArrayExpressions.elementAt(Expressions.field(datasetListField), Expressions.value(0)))
- .include(executionField, ArrayExpressions
- .elementAt(Expressions.field(executionListField), Expressions.value(0)))
- .suppressId();
+ final Projection projection = Projection.project()
+ .include(datasetField,
+ ArrayExpressions.elementAt(Expressions.field(datasetListField),
+ Expressions.value(0)))
+ .include(executionField, ArrayExpressions
+ .elementAt(Expressions.field(executionListField), Expressions.value(0)))
+ .suppressId();
aggregation.project(projection);
}
/**
- * This object contains a pair consisting of a dataset and an execution. It is meant to be a
- * result of aggregate queries, so the field names cannot easily be changed.
+ * This object contains a pair consisting of a dataset and an execution. It is meant to be a result of aggregate queries, so the
+ * field names cannot easily be changed.
*
* <p>Annotation {@link Entity} required so that morphia can handle the aggregations.
*/
@Entity
@@ -637,7 +625,7 @@ public ExecutionDatasetPair() {
/**
* Constructor.
*
- * @param dataset The dataset.
+ * @param dataset The dataset.
* @param execution The execution.
*/
public ExecutionDatasetPair(Dataset dataset, WorkflowExecution execution) {
@@ -655,8 +643,7 @@ public WorkflowExecution getExecution() {
}
/**
- * The number of WorkflowExecutions that would be returned if a get all request would be
- * performed.
+ * The number of WorkflowExecutions that would be returned if a get all request would be performed.
*
* @return the number representing the size during a get all request
*/
@@ -667,8 +654,7 @@ public int getWorkflowExecutionsPerRequest() {
}
/**
- * Set the number of WorkflowExecutions that would be returned if a get all request would be
- * performed.
+ * Set the number of WorkflowExecutions that would be returned if a get all request would be performed.
*
* @param workflowExecutionsPerRequest the number to set to
*/
@@ -709,15 +695,15 @@ public void setMaxServedExecutionListLength(int maxServedExecutionListLength) {
public boolean isCancelled(ObjectId id) {
WorkflowExecution workflowExecution = retryableExternalRequestForNetworkExceptions(() ->
morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), id))
- .first(new FindOptions().projection().include(WORKFLOW_STATUS.getFieldName())));
+ .filter(Filters.eq(ID.getFieldName(), id))
+ .first(new FindOptions().projection().include(WORKFLOW_STATUS.getFieldName())));
return workflowExecution != null
&& workflowExecution.getWorkflowStatus() == WorkflowStatus.CANCELLED;
}
/**
- * Check if a WorkflowExecution using an execution identifier is in a cancelling state. The state
- * before finally being {@link WorkflowStatus#CANCELLED}
+ * Check if a WorkflowExecution using an execution identifier is in a cancelling state. The state before finally being
+ * {@link WorkflowStatus#CANCELLED}
*
* @param id the execution identifier
* @return true for cancelling, false for not cancelling
@@ -725,8 +711,8 @@ public boolean isCancelled(ObjectId id) {
public boolean isCancelling(ObjectId id) {
WorkflowExecution workflowExecution = retryableExternalRequestForNetworkExceptions(
() -> morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class)
- .filter(Filters.eq(ID.getFieldName(), id))
- .first(new FindOptions().projection().include("cancelling")));
+ .filter(Filters.eq(ID.getFieldName(), id))
+ .first(new FindOptions().projection().include("cancelling")));
return workflowExecution != null && workflowExecution.isCancelling();
}
@@ -738,7 +724,7 @@ public boolean isCancelling(ObjectId id) {
*/
public boolean deleteAllByDatasetId(String datasetId) {
Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class);
+ .find(WorkflowExecution.class);
query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId));
DeleteResult deleteResult = retryableExternalRequestForNetworkExceptions(
() -> query.delete(new DeleteOptions().multi(true)));
@@ -756,18 +742,17 @@ public WorkflowExecution getByExternalTaskId(long externalTaskId) {
// TODO JV Validation is disabled because otherwise it complains that the subquery is looking in a
// list of AbstractMetisPlugin objects that don't have the "externalTaskId" property being queried.
final Query<WorkflowExecution> query = morphiaDatastoreProvider.getDatastore()
- .find(WorkflowExecution.class).disableValidation();
+ .find(WorkflowExecution.class).disableValidation();
query.filter(Filters.elemMatch(METIS_PLUGINS.getFieldName(),
Filters.eq("externalTaskId", Long.toString(externalTaskId))));
return retryableExternalRequestForNetworkExceptions(query::first);
}
/**
- * This method retrieves the workflow execution that contains a subtask satisfying the given
- * parameters.
+ * This method retrieves the workflow execution that contains a subtask satisfying the given parameters.
*
- * @param plugin The plugin ID representing the subtask.
- * @param datasetId The dataset ID of the workflow execution.
+ * @param plugin The plugin ID representing the subtask.
+ * @param datasetId The dataset ID of the workflow execution.
* @return The workflow execution.
*/
public WorkflowExecution getByTaskExecution(ExecutedMetisPluginId plugin, String datasetId) {
@@ -796,7 +781,7 @@ public WorkflowExecution getAnyByXsltId(String xsltId) {
// Create query to find workflow execution
final Query<WorkflowExecution> query =
morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class)
- .disableValidation();
+ .disableValidation();
query.disableValidation().filter(Filters.elemMatch(METIS_PLUGINS.getFieldName(),
Filters.eq(PLUGIN_METADATA.getFieldName() + "." + XSLT_ID.getFieldName(), xsltId)));
return retryableExternalRequestForNetworkExceptions(query::first);
@@ -863,9 +848,9 @@ public static class ResultList {
/**
* Constructor.
*
- * @param results The results.
- * @param maxResultCountReached Whether the maximum result count has been reached (indicating
- * whether next pages will be served).
+ * @param results The results.
+ * @param maxResultCountReached Whether the maximum result count has been reached (indicating whether next pages will be
+ * served).
*/
public ResultList(List<T> results, boolean maxResultCountReached) {
this.results = new ArrayList<>(results);
diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java
index 520dc34938..56b4be524f 100644
--- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java
+++ b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java
@@ -356,8 +356,7 @@ void getListOfFileContentsFromPluginExecution() throws Exception {
// Mock getting the records from eCloud.
final String ecloudId = "ECLOUDID1";
- final CloudTagsResponse cloudTagsResponse = new CloudTagsResponse(ecloudId, false, false,
- false);
+ final CloudTagsResponse cloudTagsResponse = new CloudTagsResponse(ecloudId, false);
when(ecloudDataSetServiceClient
.getRevisionsWithDeletedFlagSetToFalse(anyString(), anyString(), anyString(), anyString(), anyString(),
anyString(), anyInt())).thenReturn(Collections.singletonList(cloudTagsResponse));
diff --git a/metis-core/pom.xml b/metis-core/pom.xml
index 76129fe3f4..d8814c0072 100644
--- a/metis-core/pom.xml
+++ b/metis-core/pom.xml
@@ -4,7 +4,7 @@
<artifactId>metis-framework</artifactId>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
</parent>
<artifactId>metis-core</artifactId>
<packaging>pom</packaging>
diff --git a/metis-dereference/metis-dereference-common/pom.xml b/metis-dereference/metis-dereference-common/pom.xml
index 28523fe221..43e45a2ed3 100644
--- a/metis-dereference/metis-dereference-common/pom.xml
+++ b/metis-dereference/metis-dereference-common/pom.xml
@@ -4,7 +4,7 @@
<artifactId>metis-dereference</artifactId>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
</parent>
<artifactId>metis-dereference-common</artifactId>
diff --git a/metis-dereference/metis-dereference-import/pom.xml b/metis-dereference/metis-dereference-import/pom.xml
index a5cbf00079..e51a419fea 100644
--- a/metis-dereference/metis-dereference-import/pom.xml
+++ b/metis-dereference/metis-dereference-import/pom.xml
@@ -4,7 +4,7 @@
<artifactId>metis-dereference</artifactId>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>metis-dereference-import</artifactId>
diff --git a/metis-dereference/metis-dereference-rest/pom.xml b/metis-dereference/metis-dereference-rest/pom.xml
index 3c0ce3bc8b..699e0a9257 100644
--- a/metis-dereference/metis-dereference-rest/pom.xml
+++ b/metis-dereference/metis-dereference-rest/pom.xml
@@ -4,7 +4,7 @@
<artifactId>metis-dereference</artifactId>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
</parent>
<artifactId>metis-dereference-rest</artifactId>
@@ -61,6 +61,11 @@
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
</dependency>
+ <dependency>
+ <groupId>eu.europeana.metis</groupId>
+ <artifactId>metis-common-spring-properties</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>eu.europeana.metis</groupId>
<artifactId>metis-dereference-service</artifactId>
diff --git a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ApplicationConfiguration.java b/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ApplicationConfiguration.java
index cf5ad4ca2f..1eb4a347fb 100644
--- a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ApplicationConfiguration.java
+++ b/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ApplicationConfiguration.java
@@ -2,6 +2,7 @@
import com.mongodb.client.MongoClient;
import eu.europeana.corelib.web.socks.SocksProxy;
+import eu.europeana.metis.dereference.rest.config.properties.MetisDereferenceConfigurationProperties;
import eu.europeana.metis.dereference.service.DereferenceService;
import eu.europeana.metis.dereference.service.DereferencingManagementService;
import eu.europeana.metis.dereference.service.MongoDereferenceService;
@@ -10,18 +11,24 @@
import eu.europeana.metis.dereference.service.dao.VocabularyDao;
import eu.europeana.metis.dereference.vocimport.VocabularyCollectionImporterFactory;
import eu.europeana.metis.mongo.connection.MongoClientProvider;
+import eu.europeana.metis.mongo.connection.MongoProperties;
+import eu.europeana.metis.mongo.connection.MongoProperties.ReadPreferenceValue;
import eu.europeana.metis.utils.CustomTruststoreAppender;
-import eu.europeana.metis.utils.CustomTruststoreAppender.TrustStoreConfigurationException;
import eu.europeana.metis.utils.apm.ElasticAPMConfiguration;
+import java.lang.invoke.MethodHandles;
import java.util.Set;
import javax.annotation.PreDestroy;
+import metis.common.config.properties.SocksProxyConfigurationProperties;
+import metis.common.config.properties.TruststoreConfigurationProperties;
+import metis.common.config.properties.mongo.MongoConfigurationProperties;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.web.filter.ForwardedHeaderFilter;
@@ -34,59 +41,84 @@
* Entry class with configuration fields and beans initialization for the application.
*/
@Configuration
-@Import({ElasticAPMConfiguration.class})
+@EnableConfigurationProperties({
+ ElasticAPMConfiguration.class, TruststoreConfigurationProperties.class,
+ SocksProxyConfigurationProperties.class, MongoConfigurationProperties.class,
+ MetisDereferenceConfigurationProperties.class})
@EnableScheduling
-@ComponentScan(basePackages = {"eu.europeana.metis.dereference.rest.controller",
+@ComponentScan(basePackages = {
+ "eu.europeana.metis.dereference.rest.controller",
"eu.europeana.metis.dereference.rest.exceptions"})
@EnableWebMvc
public class ApplicationConfiguration implements WebMvcConfigurer {
- private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationConfiguration.class);
-
- private final ConfigurationPropertiesHolder propertiesHolder;
+ private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final MongoClient mongoClient;
+ private ProcessedEntityDao processedEntityDao;
/**
- * Autowired constructor for Spring Configuration class.
+ * Constructor.
*
- * @param propertiesHolder the object that holds all boot configuration values
- * @throws TrustStoreConfigurationException if the configuration of the truststore failed
+ * @param truststoreConfigurationProperties the truststore configuration properties
+ * @param socksProxyConfigurationProperties the socks proxy configuration properties
+ * @param mongoConfigurationProperties the mongo configuration properties
+ * @throws CustomTruststoreAppender.TrustStoreConfigurationException if the configuration of the truststore failed
*/
- public ApplicationConfiguration(ConfigurationPropertiesHolder propertiesHolder) throws TrustStoreConfigurationException {
- mongoClient = ApplicationConfiguration.initializeApplication(propertiesHolder);
- this.propertiesHolder = propertiesHolder;
+ @Autowired
+ public ApplicationConfiguration(TruststoreConfigurationProperties truststoreConfigurationProperties,
+ SocksProxyConfigurationProperties socksProxyConfigurationProperties,
+ MongoConfigurationProperties mongoConfigurationProperties)
+ throws CustomTruststoreAppender.TrustStoreConfigurationException {
+ ApplicationConfiguration.initializeTruststore(truststoreConfigurationProperties);
+ ApplicationConfiguration.initializeSocksProxy(socksProxyConfigurationProperties);
+ this.mongoClient = ApplicationConfiguration.getMongoClient(mongoConfigurationProperties);
}
/**
- * This method performs the initializing tasks for the application.
+ * Truststore initializer
*
- * @param propertiesHolder The properties.
- * @return The Mongo client that can be used to access the mongo database.
- * @throws TrustStoreConfigurationException In case a problem occurred with the truststore.
+ * @param truststoreConfigurationProperties the truststore configuration properties
+ * @throws CustomTruststoreAppender.TrustStoreConfigurationException In case a problem occurred with the truststore.
*/
- static MongoClient initializeApplication(ConfigurationPropertiesHolder propertiesHolder)
- throws TrustStoreConfigurationException {
-
- // Load the trust store file.
- if (StringUtils.isNotEmpty(propertiesHolder.getTruststorePath()) && StringUtils
- .isNotEmpty(propertiesHolder.getTruststorePassword())) {
+ static void initializeTruststore(TruststoreConfigurationProperties truststoreConfigurationProperties)
+ throws CustomTruststoreAppender.TrustStoreConfigurationException {
+ if (StringUtils.isNotEmpty(truststoreConfigurationProperties.getPath()) && StringUtils
+ .isNotEmpty(truststoreConfigurationProperties.getPassword())) {
CustomTruststoreAppender
- .appendCustomTrustoreToDefault(propertiesHolder.getTruststorePath(),
- propertiesHolder.getTruststorePassword());
+ .appendCustomTruststoreToDefault(truststoreConfigurationProperties.getPath(),
+ truststoreConfigurationProperties.getPassword());
LOGGER.info("Custom truststore appended to default truststore");
}
+ }
- // Initialize the socks proxy.
- if (propertiesHolder.isSocksProxyEnabled()) {
- new SocksProxy(propertiesHolder.getSocksProxyHost(), propertiesHolder.getSocksProxyPort(),
- propertiesHolder
- .getSocksProxyUsername(),
- propertiesHolder.getSocksProxyPassword()).init();
+ /**
+ * Socks proxy initializer.
+ *
+ * @param socksProxyConfigurationProperties the socks proxy configuration properties
+ */
+ static void initializeSocksProxy(SocksProxyConfigurationProperties socksProxyConfigurationProperties) {
+ if (socksProxyConfigurationProperties.isEnabled()) {
+ new SocksProxy(socksProxyConfigurationProperties.getHost(), socksProxyConfigurationProperties.getPort(),
+ socksProxyConfigurationProperties.getUsername(),
+ socksProxyConfigurationProperties.getPassword()).init();
LOGGER.info("Socks proxy enabled");
}
+ }
- // Initialize the Mongo connection
- return new MongoClientProvider<>(propertiesHolder.getMongoProperties()).createMongoClient();
+ public static MongoClient getMongoClient(MongoConfigurationProperties mongoConfigurationProperties) {
+ final MongoProperties<IllegalArgumentException> mongoProperties = new MongoProperties<>(
+ IllegalArgumentException::new);
+ mongoProperties.setAllProperties(
+ mongoConfigurationProperties.getHosts(),
+ mongoConfigurationProperties.getPorts(),
+ mongoConfigurationProperties.getAuthenticationDatabase(),
+ mongoConfigurationProperties.getUsername(),
+ mongoConfigurationProperties.getPassword(),
+ mongoConfigurationProperties.isEnableSsl(),
+ ReadPreferenceValue.PRIMARY_PREFERRED,
+ mongoConfigurationProperties.getApplicationName());
+
+ return new MongoClientProvider<>(mongoProperties).createMongoClient();
}
/**
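This file's refactoring replaces the ConfigurationPropertiesHolder bean (deleted below) with typed @ConfigurationProperties classes from the new metis-common-spring-properties module. A hypothetical sketch of such a binding class; the real MongoConfigurationProperties may differ, and the "mongo" prefix and field list here are assumptions based on the getters used in getMongoClient above:

import org.springframework.boot.context.properties.ConfigurationProperties;

@ConfigurationProperties(prefix = "mongo")
public class MongoConfigurationPropertiesSketch {

  private String[] hosts;
  private int[] ports;
  private String authenticationDatabase;
  private String username;
  private String password;
  private boolean enableSsl;
  private String database;
  private String applicationName;

  // Spring Boot binds mongo.hosts, mongo.ports, etc. onto these fields via the setters.
  public String[] getHosts() { return hosts; }
  public void setHosts(String[] hosts) { this.hosts = hosts; }
  public int[] getPorts() { return ports; }
  public void setPorts(int[] ports) { this.ports = ports; }
  public String getAuthenticationDatabase() { return authenticationDatabase; }
  public void setAuthenticationDatabase(String authenticationDatabase) { this.authenticationDatabase = authenticationDatabase; }
  public String getUsername() { return username; }
  public void setUsername(String username) { this.username = username; }
  public String getPassword() { return password; }
  public void setPassword(String password) { this.password = password; }
  public boolean isEnableSsl() { return enableSsl; }
  public void setEnableSsl(boolean enableSsl) { this.enableSsl = enableSsl; }
  public String getDatabase() { return database; }
  public void setDatabase(String database) { this.database = database; }
  public String getApplicationName() { return applicationName; }
  public void setApplicationName(String applicationName) { this.applicationName = applicationName; }
}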
@@ -133,18 +165,19 @@ public void addViewControllers(ViewControllerRegistry registry) {
}
@Bean
- ProcessedEntityDao getProcessedEntityDao() {
- return new ProcessedEntityDao(mongoClient, propertiesHolder.getEntityDb());
+ ProcessedEntityDao getProcessedEntityDao(MongoConfigurationProperties mongoConfigurationProperties) {
+ processedEntityDao = new ProcessedEntityDao(mongoClient, mongoConfigurationProperties.getDatabase());
+ return processedEntityDao;
}
@Bean
- VocabularyDao getVocabularyDao() {
- return new VocabularyDao(mongoClient, propertiesHolder.getVocabularyDb());
+ VocabularyDao getVocabularyDao(MongoConfigurationProperties mongoConfigurationProperties) {
+ return new VocabularyDao(mongoClient, mongoConfigurationProperties.getDatabase());
}
@Bean
- Set<String> getAllowedUrlDomains() {
- return Set.of(propertiesHolder.getAllowedUrlDomains());
+ Set<String> getAllowedUrlDomains(MetisDereferenceConfigurationProperties metisDereferenceConfigurationProperties) {
+ return Set.of(metisDereferenceConfigurationProperties.getAllowedUrlDomains());
}
/**
@@ -164,19 +197,19 @@ ForwardedHeaderFilter forwardedHeaderFilter() {
* redis instance/cluster is used for multiple services then the cache for other services is cleared as well. This task is
* scheduled by a cron expression.
*/
-
- @Scheduled(cron = "${dereference.purge.emptyxml.frequency}")
+ // TODO: 24/08/2023 Is there a better way to load the configuration here?
+ @Scheduled(cron = "${metis-dereference.getPurgeEmptyXmlFrequency}")
public void dereferenceCacheNullOrEmpty() {
- getProcessedEntityDao().purgeByNullOrEmptyXml();
+ processedEntityDao.purgeByNullOrEmptyXml();
}
/**
* Empty Cache. This will remove ALL entries in the cache (Redis). If the same redis instance/cluster is used for multiple
* services then the cache for other services is cleared as well. This task is scheduled by a cron expression.
*/
- @Scheduled(cron = "${dereference.purge.all.frequency}")
+ @Scheduled(cron = "${metis-dereference.getPurgeAllFrequency}")
public void dereferenceCachePurgeAll() {
- getProcessedEntityDao().purgeAll();
+ processedEntityDao.purgeAll();
}
/**
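One note on the cron expressions above: a ${...} placeholder in @Scheduled is resolved against the Spring Environment as a literal key, not by calling a getter on the properties bean, which is presumably what the TODO in this hunk is about. A hypothetical sketch of the binding class and of property entries that would satisfy the placeholders verbatim (names and sample values assumed, not taken from the real module):

import org.springframework.boot.context.properties.ConfigurationProperties;

// Assumed application.properties entries matching the placeholders above verbatim:
//   metis-dereference.getPurgeEmptyXmlFrequency=0 0 3 * * ?
//   metis-dereference.getPurgeAllFrequency=0 0 4 * * ?
@ConfigurationProperties(prefix = "metis-dereference")
public class MetisDereferenceConfigurationPropertiesSketch {

  private String[] allowedUrlDomains;

  // Bound from metis-dereference.allowed-url-domains (assumed key) and consumed by getAllowedUrlDomains above.
  public String[] getAllowedUrlDomains() { return allowedUrlDomains; }
  public void setAllowedUrlDomains(String[] allowedUrlDomains) { this.allowedUrlDomains = allowedUrlDomains; }
}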
diff --git a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ConfigurationPropertiesHolder.java b/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ConfigurationPropertiesHolder.java
deleted file mode 100644
index 0c92d35e1c..0000000000
--- a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/config/ConfigurationPropertiesHolder.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package eu.europeana.metis.dereference.rest.config;
-
-import eu.europeana.metis.mongo.connection.MongoProperties;
-import eu.europeana.metis.mongo.connection.MongoProperties.ReadPreferenceValue;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.stereotype.Component;
-
-/**
- * Class that is used to read all configuration properties for the application.
- *
- * <p>It uses {@link PropertySource} to identify the properties on application startup
- *
- * This method dereferences a resource. If the resource's vocabulary specifies a positive iteration count, this method also
- * repeatedly retrieves the 'broader' resources and returns those as well.
- *
- *
- * A resource has references to its 'broader' resources (see {@link #extractBroaderResources(EnrichmentBase, Set)}). As such,
- * the resources form a directed graph and the iteration count is the distance from the requested resource. This method performs
- * a breadth-first search through this graph to retrieve all resources within a certain distance from the requested resource.
- *
- *
- * <p>The Dereference result contains a collection of dereferenced resources.
- * Note: that collection cannot be null, but it can be empty. The dereference status can have the following values:
- *
- * <ul>
- * <li>NO_VOCABULARY_MATCHING, this occurs if there is no enrichment base and no vocabulary.</li>
- * <li>NO_ENTITY_FOR_VOCABULARY, this means the resource was found but no vocabulary and no enrichment was found.</li>
- * <li>ENTITY_FOUND_XML_XSLT_ERROR, this occurs when a JAXBException happened.</li>
- * <li>INVALID_URL, this occurs when a URISyntaxException happened.</li>
- * <li>UNKNOWN_ENTITY, this occurs if the entity is unknown.</li>
- * <li>SUCCESS, this means everything was processed successfully.</li>
- * </ul>
- *
- * @param resourceId The resource to dereference.
- * @return An object containing the dereferenced resources and the status of dereference process.
- */
- private DereferenceResult dereferenceResource(String resourceId) {
- DereferenceResult dereferenceResult;
- try {
- // Get the main object to dereference. If null, we are done.
- final DereferenceResultWrapper resource = computeEnrichmentBaseVocabulary(resourceId);
-
- dereferenceResult = checkEmptyEnrichmentBaseAndVocabulary(resource);
-
- if (dereferenceResult == null) {
- // Create value resolver that catches exceptions and logs them.
- final Function<String, Pair<EnrichmentBase, DereferenceResultStatus>> valueResolver = getValueResolver();
-
- // Perform the breadth-first search to search for broader terms (if needed).
- final int iterations = resource.getVocabulary().getIterations();
- final Map<String, Pair<EnrichmentBase, DereferenceResultStatus>> result;
- if (iterations > 0) {
- result = GraphUtils
- .breadthFirstSearch(resourceId,
- new ImmutablePair<>(resource.getEnrichmentBase(), resource.getDereferenceResultStatus()),
- resource.getVocabulary().getIterations(),
- valueResolver, this::extractBroaderResources);
- } else {
- result = new HashMap<>();
- result.put(resourceId, new ImmutablePair<>(resource.getEnrichmentBase(), resource.getDereferenceResultStatus()));
+ /**
+ * Constructor.
+ *
+ * @param retriever Object that retrieves entities from their source services.
+ * @param processedEntityDao Object managing the processed entity cache.
+ * @param vocabularyDao Object that accesses vocabularies.
+ */
+ MongoDereferenceService(RdfRetriever retriever, ProcessedEntityDao processedEntityDao,
+ VocabularyDao vocabularyDao) {
+ this.retriever = retriever;
+ this.processedEntityDao = processedEntityDao;
+ this.vocabularyDao = vocabularyDao;
+ }
+
+ private static DereferenceResult checkEmptyEnrichmentBaseAndVocabulary(
+ DereferenceResultWrapper resource) {
+ DereferenceResult dereferenceResult = null;
+ // No EnrichmentBase and no Vocabulary.
+ if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null
+ && resource.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
+ dereferenceResult = new DereferenceResult(DereferenceResultStatus.NO_VOCABULARY_MATCHING);
+ // No EnrichmentBase, no Vocabulary and an error occurred.
+ } else if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null) {
+ dereferenceResult = new DereferenceResult(resource.getDereferenceResultStatus());
}
- // Done
- dereferenceResult = new DereferenceResult(
- result.values().stream().map(Pair::getLeft).collect(Collectors.toList()),
- result.values().stream().map(Pair::getRight).filter(Objects::nonNull).findFirst()
- .orElse(DereferenceResultStatus.UNKNOWN_ENTITY));
- }
- } catch (JAXBException jaxbException) {
- LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId), jaxbException);
- // No EnrichmentBase + Status
- dereferenceResult = new DereferenceResult(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR);
- } catch (URISyntaxException uriSyntaxException) {
- LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId), uriSyntaxException);
- // No EnrichmentBase + Status
- dereferenceResult = new DereferenceResult(DereferenceResultStatus.INVALID_URL);
+ return dereferenceResult;
}
- return dereferenceResult;
- }
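The deleted dereferenceResource above delegates the graph walk to GraphUtils.breadthFirstSearch. For orientation, a self-contained sketch of a bounded breadth-first search with the same shape; the signature is illustrative, not the real GraphUtils API:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;

final class BreadthFirstSearchSketch {

  // Resolves every node within maxDistance hops of the root, visiting each node once.
  static <K, V> Map<K, V> breadthFirstSearch(K root, V rootValue, int maxDistance,
      Function<K, V> valueResolver, BiConsumer<V, Set<K>> neighborExtractor) {
    final Map<K, V> result = new HashMap<>();
    result.put(root, rootValue);
    Set<K> frontier = Set.of(root);
    for (int distance = 1; distance <= maxDistance && !frontier.isEmpty(); distance++) {
      final Set<K> next = new HashSet<>();
      for (K node : frontier) {
        neighborExtractor.accept(result.get(node), next);
      }
      next.removeAll(result.keySet()); // do not revisit already resolved resources
      next.forEach(key -> result.put(key, valueResolver.apply(key)));
      frontier = next;
    }
    return result;
  }
}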
-
- private Function<String, Pair<EnrichmentBase, DereferenceResultStatus>> getValueResolver() {
- return key -> {
- DereferenceResultWrapper result;
- try {
- result = computeEnrichmentBaseVocabulary(key);
- if (result.getEnrichmentBase() == null && result.getVocabulary() == null
- && result.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
- // No EnrichmentBase + Status
- return new ImmutablePair<>(null, DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY);
+
+ private static <T> Stream<T> getStream(Collection<T> collection) {
+ return collection == null ? Stream.empty() : collection.stream();
+ }
+
+ private static DereferenceResultWrapper evaluateTransformedEntityAndVocabulary(
+ VocabularyCandidates vocabularyCandidates,
+ String transformedEntity, Vocabulary chosenVocabulary,
+ MongoDereferencedEntity originalEntity) {
+ final DereferenceResultWrapper dereferenceResultWrapper;
+ // If retrieval or transformation of entity failed, and we have one vocabulary then we store that
+ if (transformedEntity == null && vocabularyCandidates.getVocabularies().size() == 1) {
+ dereferenceResultWrapper = new DereferenceResultWrapper(
+ vocabularyCandidates.getVocabularies().get(0),
+ originalEntity.getDereferenceResultStatus());
} else {
- // EnrichmentBase + Status
- return new ImmutablePair<>(result.getEnrichmentBase(), result.getDereferenceResultStatus());
+ if (transformedEntity == null && chosenVocabulary == null && originalEntity.getDereferenceResultStatus() == null) {
+ dereferenceResultWrapper = new DereferenceResultWrapper((EnrichmentBase) null,
+ null,
+ DereferenceResultStatus.NO_VOCABULARY_MATCHING);
+ } else {
+ dereferenceResultWrapper = new DereferenceResultWrapper(transformedEntity, chosenVocabulary,
+ originalEntity.getDereferenceResultStatus());
+ }
}
- } catch (JAXBException jaxbException) {
- LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key), jaxbException);
- // No EnrichmentBase + Status
- return new ImmutablePair<>(null, DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR);
- } catch (URISyntaxException uriSyntaxException) {
- LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key), uriSyntaxException);
- // No EnrichmentBase + Status
- return new ImmutablePair<>(null, DereferenceResultStatus.INVALID_URL);
- }
- };
- }
-
- private static DereferenceResult checkEmptyEnrichmentBaseAndVocabulary(DereferenceResultWrapper resource) {
- DereferenceResult dereferenceResult = null;
- // No EnrichmentBase and no Vocabulary.
- if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null
- && resource.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
- dereferenceResult = new DereferenceResult(DereferenceResultStatus.NO_VOCABULARY_MATCHING);
- // No EnrichmentBase, no Vocabulary and an error occurred.
- } else if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null) {
- dereferenceResult = new DereferenceResult(resource.getDereferenceResultStatus());
+ return dereferenceResultWrapper;
}
- return dereferenceResult;
- }
-
- private void extractBroaderResources(Pair<EnrichmentBase, DereferenceResultStatus> resource, Set<String> destination) {
- final Stream<String> resourceIdStream;
- if (resource.getLeft() instanceof Concept) {
- resourceIdStream = getStream(((Concept) resource.getLeft()).getBroader()).map(Resource::getResource);
- } else if (resource.getLeft() instanceof TimeSpan) {
- resourceIdStream = Optional.ofNullable(((TimeSpan) resource.getLeft()).getIsPartOf()).stream()
- .flatMap(List::stream).map(LabelResource::getResource);
- } else if (resource.getLeft() instanceof Place) {
- resourceIdStream = Optional.ofNullable(((Place) resource.getLeft()).getIsPartOf()).stream()
- .flatMap(Collection::stream).map(LabelResource::getResource);
- } else {
- resourceIdStream = Stream.empty();
+
+ /**
+ * Mongo dereference implementation.
+ *
+ * @param resourceId The resource ID (URI) to dereference.
+ * @return Dereference results with dereference status.
+ * @throws IllegalArgumentException In case the parameter is null.
+ */
+ @Override
+ public DereferenceResult dereference(String resourceId) {
+ // Sanity check
+ if (resourceId == null) {
+ throw new IllegalArgumentException("Parameter resourceId cannot be null.");
+ }
+
+ return dereferenceResource(resourceId);
}
- resourceIdStream.filter(Objects::nonNull).forEach(destination::add);
- }
-
- private static <T> Stream<T> getStream(Collection<T> collection) {
- return collection == null ? Stream.empty() : collection.stream();
- }
-
- /**
- * Computes the entity and vocabulary.
- * <p>It will use the cache if it's still valid, otherwise it will retrieve(if applicable) the
- * original entity and transform the result.
- * <p>The possible outcomes are:
- * <ul>
- * <li>Both items of the pair are null. We do not have a vocabulary candidate or we have more
- * than one vocabulary candidate and all have not succeed either retrieving the original
- * entity or transforming the retrieved entity.</li>
- * <li>Entity xml(Left) is null, and vocabulary(Right) is non null. We have a vocabulary
- * and the entity xml failed either to be retried or failed transformation.</li>
- * <li>Entity xml(Left) is non null, and vocabulary(Right) is non null. We have a
- * successful retrieval and transformation.</li>
- * </ul>
- *
- * @param resourceId the url of the provider entity
- * @param cachedEntity the cached entity object
- * @return a EnrichmentEntityVocabulary with the entity, vocabulary, and status.
- * @throws URISyntaxException if the resource identifier url is invalid
- */
- private DereferenceResultWrapper computeEntityVocabulary(String resourceId, ProcessedEntity cachedEntity)
- throws URISyntaxException {
-
- final DereferenceResultWrapper transformedEntityVocabulary;
-
- //Check if vocabulary actually exists
- Vocabulary cachedVocabulary = null;
- boolean cachedVocabularyChanged = false;
- if (cachedEntity != null && StringUtils.isNotBlank(cachedEntity.getVocabularyId())) {
- cachedVocabulary = vocabularyDao.get(cachedEntity.getVocabularyId());
- cachedVocabularyChanged = cachedVocabulary == null;
+
+ /**
+ * <p>
+ * This method dereferences a resource. If the resource's vocabulary specifies a positive
+ * iteration count, this method also repeatedly retrieves the 'broader' resources and returns
+ * those as well.
+ * </p>
+ * <p>
+ * A resource has references to its 'broader' resources (see
+ * {@link #extractBroaderResources(Pair, Set)}). As such, the resources form a directed
+ * graph and the iteration count is the distance from the requested resource. This method performs
+ * a breadth-first search through this graph to retrieve all resources within a certain distance
+ * from the requested resource.
+ * </p>
+ * <p>The dereference result contains a collection of dereferenced resources. Note: this
+ * collection cannot be null, but it may be empty. The dereference status can have the
+ * following values:
+ * <ul>
+ * <li>NO_VOCABULARY_MATCHING, which occurs if there is no enrichment base and no vocabulary.</li>
+ * <li>NO_ENTITY_FOR_VOCABULARY, which means a vocabulary was matched but no entity was found for it.</li>
+ * <li>ENTITY_FOUND_XML_XSLT_ERROR, which occurs when a JAXBException happened.</li>
+ * <li>INVALID_URL, which occurs when a URISyntaxException happened.</li>
+ * <li>UNKNOWN_EUROPEANA_ENTITY, which occurs when the Europeana entity is unknown.</li>
+ * <li>SUCCESS, which means everything was processed successfully.</li>
+ * </ul>
+ * </p>
+ *
+ * @param resourceId The resource to dereference.
+ * @return An object containing the dereferenced resources and the status of dereference process.
+ */
+ private DereferenceResult dereferenceResource(String resourceId) {
+ DereferenceResult dereferenceResult;
+ try {
+ // Get the main object to dereference. If null, we are done.
+ final DereferenceResultWrapper resource = computeEnrichmentBaseVocabulary(resourceId);
+
+ dereferenceResult = checkEmptyEnrichmentBaseAndVocabulary(resource);
+
+ if (dereferenceResult == null) {
+ // Create value resolver that catches exceptions and logs them.
+ final Function<String, Pair<EnrichmentBase, DereferenceResultStatus>> valueResolver = getValueResolver();
+
+ // Perform the breadth-first search to search for broader terms (if needed).
+ final int iterations = resource.getVocabulary().getIterations();
+ final Map<String, Pair<EnrichmentBase, DereferenceResultStatus>> result;
+ if (iterations > 0) {
+ result = GraphUtils
+ .breadthFirstSearch(resourceId,
+ new ImmutablePair<>(resource.getEnrichmentBase(),
+ resource.getDereferenceResultStatus()),
+ iterations,
+ valueResolver, this::extractBroaderResources);
+ } else {
+ result = new HashMap<>();
+ result.put(resourceId, new ImmutablePair<>(resource.getEnrichmentBase(),
+ resource.getDereferenceResultStatus()));
+ }
+ // Done
+ dereferenceResult = new DereferenceResult(
+ result.values().stream().map(Pair::getLeft).collect(Collectors.toList()),
+ result.values().stream().map(Pair::getRight).filter(Objects::nonNull).findFirst()
+ .orElse(DereferenceResultStatus.SUCCESS));
+ }
+ } catch (JAXBException jaxbException) {
+ LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId),
+ jaxbException);
+ // No EnrichmentBase + Status
+ dereferenceResult = new DereferenceResult(
+ DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR);
+ } catch (URISyntaxException uriSyntaxException) {
+ LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId),
+ uriSyntaxException);
+ // No EnrichmentBase + Status
+ dereferenceResult = new DereferenceResult(DereferenceResultStatus.INVALID_URL);
+ }
+ return dereferenceResult;
}
- // If we do not have any cached entity, we need to compute it
- if (cachedEntity == null || cachedVocabularyChanged) {
- transformedEntityVocabulary = retrieveAndTransformEntity(resourceId);
- saveEntity(resourceId, cachedEntity, new DereferenceResultWrapper(transformedEntityVocabulary.getEntity(),
- transformedEntityVocabulary.getVocabulary()));
- } else {
- // If we have something in the cache we return that instead
- transformedEntityVocabulary = new DereferenceResultWrapper(cachedEntity.getXml(),
- cachedVocabulary, DereferenceResultStatus.SUCCESS);
+ private Function<String, Pair<EnrichmentBase, DereferenceResultStatus>> getValueResolver() {
+ return key -> {
+ DereferenceResultWrapper result;
+ try {
+ result = computeEnrichmentBaseVocabulary(key);
+ if (result.getEnrichmentBase() == null && result.getVocabulary() == null
+ && result.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
+ // No EnrichmentBase + Status
+ return new ImmutablePair<>(null, DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY);
+ } else {
+ // EnrichmentBase + Status
+ return new ImmutablePair<>(result.getEnrichmentBase(),
+ result.getDereferenceResultStatus());
+ }
+ } catch (JAXBException jaxbException) {
+ LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key),
+ jaxbException);
+ // No EnrichmentBase + Status
+ return new ImmutablePair<>(null, DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR);
+ } catch (URISyntaxException uriSyntaxException) {
+ LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key),
+ uriSyntaxException);
+ // No EnrichmentBase + Status
+ return new ImmutablePair<>(null, DereferenceResultStatus.INVALID_URL);
+ }
+ };
}
- return transformedEntityVocabulary;
- }
-
- private DereferenceResultWrapper retrieveAndTransformEntity(String resourceId) throws URISyntaxException {
-
- final VocabularyCandidates vocabularyCandidates = VocabularyCandidates
- .findVocabulariesForUrl(resourceId, vocabularyDao::getByUriSearch);
-
- String transformedEntity = null;
- Vocabulary chosenVocabulary = null;
-
- MongoDereferencedEntity originalEntity = new MongoDereferencedEntity(resourceId, null);
- MongoDereferencedEntity entityTransformed = new MongoDereferencedEntity(null, null);
- //Only if we have vocabularies we continue
- if (!vocabularyCandidates.isEmpty()) {
- originalEntity = retrieveOriginalEntity(resourceId, vocabularyCandidates);
- //If original entity exists, try transformation
- if (originalEntity.getEntity() != null && originalEntity.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
- // Transform the original entity and find vocabulary if applicable.
- for (Vocabulary vocabulary : vocabularyCandidates.getVocabularies()) {
- entityTransformed = transformEntity(vocabulary, originalEntity.getEntity(), resourceId);
- transformedEntity = entityTransformed.getEntity();
- if (transformedEntity != null) {
- chosenVocabulary = vocabulary;
- break;
- }
+ private void extractBroaderResources(Pair<EnrichmentBase, DereferenceResultStatus> resource,
+ Set<String> destination) {
+ final Stream<String> resourceIdStream;
+ if (resource.getLeft() instanceof Concept) {
+ resourceIdStream = getStream(((Concept) resource.getLeft()).getBroader()).map(
+ Resource::getResource);
+ } else if (resource.getLeft() instanceof TimeSpan) {
+ resourceIdStream = Optional.ofNullable(((TimeSpan) resource.getLeft()).getIsPartOf()).stream()
+ .flatMap(List::stream).map(LabelResource::getResource);
+ } else if (resource.getLeft() instanceof Place) {
+ resourceIdStream = Optional.ofNullable(((Place) resource.getLeft()).getIsPartOf()).stream()
+ .flatMap(Collection::stream).map(LabelResource::getResource);
+ } else {
+ resourceIdStream = Stream.empty();
}
- // There was an update in transforming, so we update the result status.
- if (originalEntity.getDereferenceResultStatus() != entityTransformed.getDereferenceResultStatus()) {
- originalEntity = new MongoDereferencedEntity(originalEntity.getEntity(),
- entityTransformed.getDereferenceResultStatus());
+ resourceIdStream.filter(Objects::nonNull).forEach(destination::add);
+ }
+
+ /**
+ * Computes the entity and vocabulary.
+ * <p>It will use the cache if it is still valid; otherwise it will retrieve (if applicable) the
+ * original entity and transform the result.
+ * <p>The possible outcomes are:
+ * <ul>
+ * <li>Both items of the pair are null. We do not have a vocabulary candidate, or we have more
+ * than one vocabulary candidate and none of them succeeded in either retrieving the original
+ * entity or transforming the retrieved entity.</li>
+ * <li>Entity xml (left) is null and vocabulary (right) is non-null. We have a vocabulary, but
+ * the entity xml either failed to be retrieved or failed transformation.</li>
+ * <li>Entity xml (left) is non-null and vocabulary (right) is non-null. We have a successful
+ * retrieval and transformation.</li>
+ * </ul>
+ *
+ * @param resourceId the url of the provider entity
+ * @param cachedEntity the cached entity object
+ * @return a DereferenceResultWrapper with the entity, vocabulary, and status.
+ * @throws URISyntaxException if the resource identifier url is invalid
+ */
+ private DereferenceResultWrapper computeEntityVocabulary(String resourceId,
+ ProcessedEntity cachedEntity)
+ throws URISyntaxException {
+
+ final DereferenceResultWrapper transformedEntityVocabulary;
+
+ //Check if vocabulary actually exists
+ Vocabulary cachedVocabulary = null;
+ boolean cachedVocabularyChanged = false;
+ if (cachedEntity != null && StringUtils.isNotBlank(cachedEntity.getVocabularyId())) {
+ cachedVocabulary = vocabularyDao.get(cachedEntity.getVocabularyId());
+ cachedVocabularyChanged = cachedVocabulary == null;
+ }
+
+ // If we do not have any cached entity, we need to compute it
+ if (cachedEntity == null || cachedVocabularyChanged) {
+ transformedEntityVocabulary = retrieveAndTransformEntity(resourceId);
+ saveEntity(resourceId, cachedEntity,
+ new DereferenceResultWrapper(transformedEntityVocabulary.getEntity(),
+ transformedEntityVocabulary.getVocabulary()));
+ } else {
+ // If there was no xml entity but there was a vocabulary, that means there is no entity for this vocabulary.
+ if (cachedEntity.getXml() == null && StringUtils.isNotBlank(cachedEntity.getVocabularyId())) {
+ transformedEntityVocabulary = new DereferenceResultWrapper((EnrichmentBase) null,
+ cachedVocabulary, DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY);
+ } else {
+ // Otherwise, if we have something in the cache, we return that instead.
+ transformedEntityVocabulary = new DereferenceResultWrapper(cachedEntity.getXml(),
+ cachedVocabulary, DereferenceResultStatus.SUCCESS);
+ }
}
- }
+
+ return transformedEntityVocabulary;
}
- return evaluateTransformedEntityAndVocabulary(vocabularyCandidates, transformedEntity, chosenVocabulary, originalEntity);
- }
-
- private static DereferenceResultWrapper evaluateTransformedEntityAndVocabulary(VocabularyCandidates vocabularyCandidates,
- String transformedEntity, Vocabulary chosenVocabulary, MongoDereferencedEntity originalEntity) {
- final DereferenceResultWrapper dereferenceResultWrapper;
- // If retrieval or transformation of entity failed, and we have one vocabulary then we store that
- if (transformedEntity == null && vocabularyCandidates.getVocabularies().size() == 1) {
- dereferenceResultWrapper = new DereferenceResultWrapper(vocabularyCandidates.getVocabularies().get(0),
- originalEntity.getDereferenceResultStatus());
- } else {
- dereferenceResultWrapper = new DereferenceResultWrapper(transformedEntity, chosenVocabulary,
- originalEntity.getDereferenceResultStatus());
+ private DereferenceResultWrapper retrieveAndTransformEntity(String resourceId)
+ throws URISyntaxException {
+
+ final VocabularyCandidates vocabularyCandidates = VocabularyCandidates
+ .findVocabulariesForUrl(resourceId, vocabularyDao::getByUriSearch);
+
+ String transformedEntity = null;
+ Vocabulary chosenVocabulary = null;
+
+ MongoDereferencedEntity originalEntity = new MongoDereferencedEntity(resourceId, null);
+ MongoDereferencedEntity entityTransformed = new MongoDereferencedEntity(null, null);
+ // We only continue if we have vocabularies.
+ if (!vocabularyCandidates.isEmpty()) {
+ originalEntity = retrieveOriginalEntity(resourceId, vocabularyCandidates);
+ //If original entity exists, try transformation
+ if (originalEntity.getEntity() != null
+ && originalEntity.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) {
+ // Transform the original entity and find vocabulary if applicable.
+ for (Vocabulary vocabulary : vocabularyCandidates.getVocabularies()) {
+ entityTransformed = transformEntity(vocabulary, originalEntity.getEntity(), resourceId);
+ transformedEntity = entityTransformed.getEntity();
+ if (transformedEntity != null) {
+ chosenVocabulary = vocabulary;
+ break;
+ }
+ }
+ // The transformation changed the result status, so we update it.
+ if (originalEntity.getDereferenceResultStatus()
+ != entityTransformed.getDereferenceResultStatus()) {
+ originalEntity = new MongoDereferencedEntity(originalEntity.getEntity(),
+ entityTransformed.getDereferenceResultStatus());
+ }
+ }
+ }
+
+ return evaluateTransformedEntityAndVocabulary(vocabularyCandidates, transformedEntity,
+ chosenVocabulary, originalEntity);
}
- return dereferenceResultWrapper;
- }
-
- private void saveEntity(String resourceId, ProcessedEntity cachedEntity,
- DereferenceResultWrapper transformedEntityAndVocabularyPair) {
-
- final String entityXml = transformedEntityAndVocabularyPair.getEntity();
- final Vocabulary vocabulary = transformedEntityAndVocabularyPair.getVocabulary();
- final String vocabularyIdString = Optional.ofNullable(vocabulary).map(Vocabulary::getId)
- .map(ObjectId::toString).orElse(null);
- //Save entity
- ProcessedEntity entityToCache = (cachedEntity == null) ? new ProcessedEntity() : cachedEntity;
- entityToCache.setResourceId(resourceId);
- entityToCache.setXml(entityXml);
- entityToCache.setVocabularyId(vocabularyIdString);
- processedEntityDao.save(entityToCache);
- }
-
- private MongoDereferencedEntity transformEntity(Vocabulary vocabulary,
- final String originalEntity, final String resourceId) {
- Optional<String> result;
- DereferenceResultStatus resultStatus;
- try {
- final IncomingRecordToEdmTransformer incomingRecordToEdmTransformer = new IncomingRecordToEdmTransformer(
- vocabulary.getXslt());
- result = incomingRecordToEdmTransformer.transform(originalEntity, resourceId);
- if (result.isEmpty()) {
- resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS;
- } else {
- resultStatus = DereferenceResultStatus.SUCCESS;
- }
- } catch (TransformerException | BadContentException | ParserConfigurationException e) {
- LOGGER.warn("Error transforming entity: {} with message: {}", resourceId, e.getMessage());
- LOGGER.debug("Transformation issue: ", e);
- resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR;
- result = Optional.empty();
+
+ private void saveEntity(String resourceId, ProcessedEntity cachedEntity,
+ DereferenceResultWrapper transformedEntityAndVocabularyPair) {
+
+ final String entityXml = transformedEntityAndVocabularyPair.getEntity();
+ final Vocabulary vocabulary = transformedEntityAndVocabularyPair.getVocabulary();
+ final String vocabularyIdString = Optional.ofNullable(vocabulary).map(Vocabulary::getId)
+ .map(ObjectId::toString).orElse(null);
+ //Save entity
+ ProcessedEntity entityToCache = (cachedEntity == null) ? new ProcessedEntity() : cachedEntity;
+ entityToCache.setResourceId(resourceId);
+ entityToCache.setXml(entityXml);
+ entityToCache.setVocabularyId(vocabularyIdString);
+ processedEntityDao.save(entityToCache);
}
- return new MongoDereferencedEntity(result.orElse(null), resultStatus);
- }
-
- private MongoDereferencedEntity retrieveOriginalEntity(String resourceId, VocabularyCandidates candidates) {
- DereferenceResultStatus dereferenceResultStatus = DereferenceResultStatus.SUCCESS;
-
- if (candidates.isEmpty()) {
- dereferenceResultStatus = DereferenceResultStatus.NO_VOCABULARY_MATCHING;
- return new MongoDereferencedEntity(null, dereferenceResultStatus);
- } else {
- try {
- // Check the input (check the resource ID for URI syntax).
- new URI(resourceId);
- } catch (URISyntaxException e) {
- LOGGER.error("Invalid URI: {} with message: {}", resourceId, e.getMessage());
- dereferenceResultStatus = DereferenceResultStatus.INVALID_URL;
- return new MongoDereferencedEntity(null, dereferenceResultStatus);
- }
- // Compute the result (a URI syntax issue is considered a problem with the suffix).
- final String originalEntity = candidates.getVocabulariesSuffixes().stream().map(suffix -> {
+
+ private MongoDereferencedEntity transformEntity(Vocabulary vocabulary,
+ final String originalEntity, final String resourceId) {
+ Optional<String> result;
+ DereferenceResultStatus resultStatus;
try {
- return retriever.retrieve(resourceId, suffix);
- } catch (IOException | URISyntaxException e) {
- LOGGER.warn("Failed to retrieve: {} with message: {}", resourceId, e.getMessage());
- LOGGER.debug("Problem retrieving resource.", e);
- return null;
+ final IncomingRecordToEdmTransformer incomingRecordToEdmTransformer = new IncomingRecordToEdmTransformer(
+ vocabulary.getXslt());
+ result = incomingRecordToEdmTransformer.transform(originalEntity, resourceId);
+ if (result.isEmpty()) {
+ resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS;
+ } else {
+ resultStatus = DereferenceResultStatus.SUCCESS;
+ }
+ } catch (TransformerException | BadContentException | ParserConfigurationException e) {
+ LOGGER.warn("Error transforming entity: {} with message: {}", resourceId, e.getMessage());
+ LOGGER.debug("Transformation issue: ", e);
+ resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR;
+ result = Optional.empty();
+ }
+ return new MongoDereferencedEntity(result.orElse(null), resultStatus);
+ }
+
+ private MongoDereferencedEntity retrieveOriginalEntity(String resourceId,
+ VocabularyCandidates candidates) {
+ DereferenceResultStatus dereferenceResultStatus = DereferenceResultStatus.SUCCESS;
+
+ if (candidates.isEmpty()) {
+ dereferenceResultStatus = DereferenceResultStatus.NO_VOCABULARY_MATCHING;
+ return new MongoDereferencedEntity(null, dereferenceResultStatus);
+ } else {
+ try {
+ // Check the input (check the resource ID for URI syntax).
+ new URI(resourceId);
+ } catch (URISyntaxException e) {
+ LOGGER.error("Invalid URI: {} with message: {}", resourceId, e.getMessage());
+ dereferenceResultStatus = DereferenceResultStatus.INVALID_URL;
+ return new MongoDereferencedEntity(null, dereferenceResultStatus);
+ }
+ // Compute the result (a URI syntax issue is considered a problem with the suffix).
+ final String originalEntity = candidates.getVocabulariesSuffixes().stream().map(suffix -> {
+ try {
+ return retriever.retrieve(resourceId, suffix);
+ } catch (IOException | URISyntaxException e) {
+ LOGGER.warn("Failed to retrieve: {} with message: {}", resourceId, e.getMessage());
+ LOGGER.debug("Problem retrieving resource.", e);
+ return null;
+ }
+ }).filter(Objects::nonNull).findAny().orElse(null);
+
+ // Evaluate the result.
+ if (originalEntity == null) {
+ if (LOGGER.isInfoEnabled()) {
+ LOGGER.info("No entity XML for uri {}", CRLF_PATTERN.matcher(resourceId).replaceAll(""));
+ }
+ dereferenceResultStatus = DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY;
+ }
+ return new MongoDereferencedEntity(originalEntity, dereferenceResultStatus);
}
- }).filter(Objects::nonNull).findAny().orElse(null);
-
- // Evaluate the result.
- if (originalEntity == null && LOGGER.isInfoEnabled()) {
- LOGGER.info("No entity XML for uri {}", CRLF_PATTERN.matcher(resourceId).replaceAll(""));
- dereferenceResultStatus = DereferenceResultStatus.UNKNOWN_ENTITY;
- }
- return new MongoDereferencedEntity(originalEntity, dereferenceResultStatus);
}
- }
-
- DereferenceResultWrapper computeEnrichmentBaseVocabulary(String resourceId) throws JAXBException, URISyntaxException {
- // Try to get the entity and its vocabulary from the cache.
- final ProcessedEntity cachedEntity = processedEntityDao.getByResourceId(resourceId);
- final DereferenceResultWrapper result = computeEntityVocabulary(resourceId, cachedEntity);
-
- // Parse the entity.
- if (result.getEntity() == null || result.getVocabulary() == null) {
- return new DereferenceResultWrapper(result.getDereferenceResultStatus());
- } else {
- return new DereferenceResultWrapper(
- EnrichmentBaseConverter.convertToEnrichmentBase(result.getEntity()),
- result.getVocabulary(),
- result.getDereferenceResultStatus());
+
+ DereferenceResultWrapper computeEnrichmentBaseVocabulary(String resourceId)
+ throws JAXBException, URISyntaxException {
+ // Try to get the entity and its vocabulary from the cache.
+ final ProcessedEntity cachedEntity = processedEntityDao.getByResourceId(resourceId);
+ final DereferenceResultWrapper result = computeEntityVocabulary(resourceId, cachedEntity);
+
+ // Parse the entity.
+ if (result.getEntity() == null || result.getVocabulary() == null) {
+ return new DereferenceResultWrapper(result.getDereferenceResultStatus());
+ } else {
+ return new DereferenceResultWrapper(
+ EnrichmentBaseConverter.convertToEnrichmentBase(result.getEntity()),
+ result.getVocabulary(),
+ result.getDereferenceResultStatus());
+ }
}
- }
}
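For orientation, here is a minimal sketch of how a caller might drive the refactored dereference flow and branch on the new status values. Only the types, methods and statuses come from the diff above; the 'service' instance, the geonames URL and the process(...) call are illustrative placeholders.

    // Sketch only: 'service' is an already-wired MongoDereferenceService instance.
    final DereferenceResult result = service.dereference("http://sws.geonames.org/3020251/");
    switch (result.getDereferenceStatus()) {
      case SUCCESS:
        // The list holds the resource plus any 'broader' resources collected by the
        // breadth-first search (bounded by the vocabulary's iteration count).
        result.getEnrichmentBasesAsList().forEach(entity -> process(entity)); // process(...) is a placeholder
        break;
      case NO_VOCABULARY_MATCHING:
      case NO_ENTITY_FOR_VOCABULARY:
        // Nothing was dereferenced: the list is empty, but never null.
        break;
      default:
        // INVALID_URL, ENTITY_FOUND_XML_XSLT_ERROR, UNKNOWN_EUROPEANA_ENTITY, ...
        break;
    }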
diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/ProcessedEntityDao.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/ProcessedEntityDao.java
index e9a6a6c0c8..7c6c808df9 100644
--- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/ProcessedEntityDao.java
+++ b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/ProcessedEntityDao.java
@@ -10,7 +10,7 @@
import dev.morphia.mapping.DiscriminatorFunction;
import dev.morphia.mapping.MapperOptions;
import dev.morphia.mapping.NamingStrategy;
-import dev.morphia.query.experimental.filters.Filters;
+import dev.morphia.query.filters.Filters;
import eu.europeana.metis.dereference.ProcessedEntity;
import java.util.Optional;
import org.bson.types.ObjectId;
diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/VocabularyDao.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/VocabularyDao.java
index 52bdb7d9ed..b8b1b48827 100644
--- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/VocabularyDao.java
+++ b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/dao/VocabularyDao.java
@@ -11,7 +11,7 @@
import dev.morphia.mapping.MapperOptions;
import dev.morphia.mapping.NamingStrategy;
import dev.morphia.query.Query;
-import dev.morphia.query.experimental.filters.Filters;
+import dev.morphia.query.filters.Filters;
import eu.europeana.metis.dereference.Vocabulary;
import java.util.List;
import java.util.regex.Pattern;
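The two import changes above track the Morphia upgrade in which the filter API left its experimental package; the call sites themselves are unchanged. A representative lookup against the relocated API, sketched along the lines of ProcessedEntityDao (the field name is assumed from the entity's resourceId property):

    import dev.morphia.query.filters.Filters; // was: dev.morphia.query.experimental.filters.Filters

    // Fetch a cached entity by its resource ID.
    ProcessedEntity entity = datastore.find(ProcessedEntity.class)
        .filter(Filters.eq("resourceId", resourceId))
        .first();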
diff --git a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java
index 1be759e4c6..12dc8cd855 100644
--- a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java
+++ b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java
@@ -15,6 +15,7 @@
import com.mongodb.client.MongoClients;
import dev.morphia.Datastore;
import eu.europeana.enrichment.api.external.DereferenceResultStatus;
+import eu.europeana.enrichment.api.external.model.Concept;
import eu.europeana.enrichment.api.external.model.Place;
import eu.europeana.metis.dereference.DereferenceResult;
import eu.europeana.metis.dereference.RdfRetriever;
@@ -111,7 +112,7 @@ void testDereference_AbsentObject() throws JAXBException, URISyntaxException {
}
@Test
- void testDereference_UnknownEntity() throws JAXBException, URISyntaxException, IOException {
+ void testDereference_UnknownEuropeanaEntity() throws JAXBException, URISyntaxException, IOException {
// Create vocabulary for geonames and save it.
final Vocabulary geonames = new Vocabulary();
geonames.setUris(Collections.singleton("http://sws.geonames.org/"));
@@ -131,7 +132,43 @@ void testDereference_UnknownEntity() throws JAXBException, URISyntaxException, I
final DereferenceResult emptyResult = service.dereference(entityId);
assertNotNull(emptyResult);
assertFalse(emptyResult.getEnrichmentBasesAsList().isEmpty());
- assertEquals(DereferenceResultStatus.UNKNOWN_ENTITY, emptyResult.getDereferenceStatus());
+ assertEquals(DereferenceResultStatus.SUCCESS, emptyResult.getDereferenceStatus());
+ }
+
+ @Test
+ void testDereference_NoVocabularyMatching() {
+ // Create concept
+ final Concept concept = new Concept();
+ final String entityId = "http://data.europeana.eu/concept/XXXXXXXXX";
+ concept.setAbout(entityId);
+
+ final DereferenceResult emptyResult = service.dereference(entityId);
+ assertNotNull(emptyResult);
+ assertTrue(emptyResult.getEnrichmentBasesAsList().isEmpty());
+ assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, emptyResult.getDereferenceStatus());
+ }
+
+ @Test
+ void testDereference_NoEntityForVocabulary() throws JAXBException, URISyntaxException, IOException {
+ // Create vocabulary for geonames and save it.
+ final Vocabulary geonames = new Vocabulary();
+ geonames.setUris(Collections.singleton("http://sws.geonames.org/"));
+ geonames.setXslt(IOUtils
+ .toString(Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("geonames.xsl")),
+ StandardCharsets.UTF_8));
+ geonames.setName("Geonames");
+ geonames.setIterations(0);
+ vocabularyDaoDatastore.save(geonames);
+
+ // Create geonames entity
+ final Place place = new Place();
+ final String entityId = "http://sws.geonames.org/302025X/";
+ place.setAbout(entityId);
+
+ final DereferenceResult emptyResult = service.dereference(entityId);
+ assertNotNull(emptyResult);
+ assertTrue(emptyResult.getEnrichmentBasesAsList().isEmpty());
+ assertEquals(DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY, emptyResult.getDereferenceStatus());
}
@Test
diff --git a/metis-dereference/pom.xml b/metis-dereference/pom.xml
index 0af1f8402b..74250fbd53 100644
--- a/metis-dereference/pom.xml
+++ b/metis-dereference/pom.xml
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
<artifactId>metis-framework</artifactId>
diff --git a/metis-enrichment/metis-enrichment-client/pom.xml b/metis-enrichment/metis-enrichment-client/pom.xml
index fc17b5735d..896ad8d82f 100644
--- a/metis-enrichment/metis-enrichment-client/pom.xml
+++ b/metis-enrichment/metis-enrichment-client/pom.xml
@@ -4,7 +4,7 @@
<artifactId>metis-enrichment</artifactId>
<groupId>eu.europeana.metis</groupId>
- <version>10</version>
+ <version>11</version>
<artifactId>metis-enrichment-client</artifactId>
<packaging>jar</packaging>
@@ -67,5 +67,11 @@
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
+ <dependency>
+ <groupId>org.glassfish.jersey.core</groupId>
+ <artifactId>jersey-common</artifactId>
+ <version>${version.jersey}</version>
+ <scope>test</scope>
+ </dependency>
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/Dereferencer.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/Dereferencer.java
index b4edec2e52..2288f2a78b 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/Dereferencer.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/Dereferencer.java
@@ -1,9 +1,10 @@
package eu.europeana.enrichment.rest.client.dereference;
+import eu.europeana.enrichment.api.internal.ReferenceTerm;
import eu.europeana.enrichment.rest.client.report.Report;
import eu.europeana.metis.schema.jibx.AboutType;
import eu.europeana.metis.schema.jibx.RDF;
-
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -28,7 +29,7 @@ public interface Dereferencer {
* @return DereferencedEntity which contains a list of RDF field names with the information associated with it and a report
* containing messages of dereference process.
*/
- List<DereferencedEntities> dereferenceEntities(Map<Class<? extends AboutType>,Set<String>> resourceIds);
+ List<DereferencedEntities> dereferenceEntities(Map<Class<? extends AboutType>, Set<String>> resourceIds);
/**
* It extracts the references for dereferencing from a RDF file
@@ -36,6 +37,9 @@ public interface Dereferencer {
* @param rdf The RDF where the references are extracted from
* @return A map with a class type associated with a set of extracted references
*/
- Map<Class<? extends AboutType>,Set<String>> extractReferencesForDereferencing(RDF rdf);
+ Map<Class<? extends AboutType>, Set<String>> extractReferencesForDereferencing(RDF rdf);
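+
+ /**
+ * Dereferences the given references against Metis' own entity collection.
+ *
+ * @param resourceIds The references to dereference.
+ * @param reports The set to which report messages of the dereference process are added.
+ * @param classType The RDF class type with which the references are associated.
+ * @return The entities that were found in the own entity collection.
+ */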
+ DereferencedEntities dereferenceOwnEntities(Set<ReferenceTerm> resourceIds,
+ HashSet<Report> reports,
+ Class<? extends AboutType> classType);
}
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java
index ac6aba46a4..bc5b44834b 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java
@@ -44,304 +44,322 @@
*/
public class DereferencerImpl implements Dereferencer {
- private static final Logger LOGGER = LoggerFactory.getLogger(DereferencerImpl.class);
-
- private final EntityMergeEngine entityMergeEngine;
- private final EntityResolver entityResolver;
- private final DereferenceClient dereferenceClient;
-
- /**
- * Constructor.
- *
- * @param entityMergeEngine The entity merge engine. Cannot be null.
- * @param entityResolver Remove entity resolver: Can be null if we only dereference own entities.
- * @param dereferenceClient Dereference client. Can be null if we don't dereference own entities.
- */
- public DereferencerImpl(EntityMergeEngine entityMergeEngine, EntityResolver entityResolver,
- DereferenceClient dereferenceClient) {
- this.entityMergeEngine = entityMergeEngine;
- this.entityResolver = entityResolver;
- this.dereferenceClient = dereferenceClient;
- }
-
- private static URL checkIfUrlIsValid(HashSet<Report> reports, String id) {
- try {
- URI uri = new URI(id);
- return new URL(uri.toString());
- } catch (URISyntaxException | MalformedURLException e) {
- reports.add(Report
- .buildDereferenceIgnore()
- .withStatus(HttpStatus.OK)
- .withValue(id)
- .withException(e)
- .build());
- LOGGER.debug("Invalid enrichment reference found: {}", id);
- return null;
+ private static final Logger LOGGER = LoggerFactory.getLogger(DereferencerImpl.class);
+
+ private final EntityMergeEngine entityMergeEngine;
+ private final EntityResolver entityResolver;
+ private final DereferenceClient dereferenceClient;
+
+ /**
+ * Constructor.
+ *
+ * @param entityMergeEngine The entity merge engine. Cannot be null.
+ * @param entityResolver Remote entity resolver: can be null if we only dereference own entities.
+ * @param dereferenceClient Dereference client. Can be null if we don't dereference own entities.
+ */
+ public DereferencerImpl(EntityMergeEngine entityMergeEngine, EntityResolver entityResolver,
+ DereferenceClient dereferenceClient) {
+ this.entityMergeEngine = entityMergeEngine;
+ this.entityResolver = entityResolver;
+ this.dereferenceClient = dereferenceClient;
}
- }
-
- private static void setDereferenceStatusInReport(String resourceId, HashSet<Report> reports,
- DereferenceResultStatus resultStatus) {
- if (!resultStatus.equals(DereferenceResultStatus.SUCCESS)) {
- String resultMessage;
- switch (resultStatus) {
- case ENTITY_FOUND_XML_XSLT_ERROR:
- resultMessage = "Entity was found, applying the XSLT results in an XML error (either because the entity is malformed or the XSLT is malformed).";
- break;
- case ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS:
- resultMessage = "Entity was found, but the XSLT mapping did not produce a contextual class.";
- break;
- case INVALID_URL:
- resultMessage = "A URL to be dereferenced is invalid.";
- break;
- case NO_VOCABULARY_MATCHING:
- resultMessage = "Could not find a vocabulary matching the URL.";
- break;
- case UNKNOWN_ENTITY:
- resultMessage = "Dereferencing or Coreferencing: the europeana entity does not exist.";
- break;
- case NO_ENTITY_FOR_VOCABULARY:
- resultMessage = "Could not find an entity for a known vocabulary.";
- break;
- default:
- resultMessage = "";
- }
- if (resultStatus.equals(DereferenceResultStatus.INVALID_URL) ||
- resultStatus.equals(DereferenceResultStatus.NO_VOCABULARY_MATCHING) ||
- resultStatus.equals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS)) {
- reports.add(Report
- .buildDereferenceIgnore()
- .withStatus(HttpStatus.OK)
- .withValue(resourceId)
- .withMessage(resultMessage)
- .build());
- } else {
- reports.add(Report
- .buildDereferenceWarn()
- .withStatus(HttpStatus.OK)
- .withValue(resourceId)
- .withMessage(resultMessage)
- .build());
- }
+
+ private static URL checkIfUrlIsValid(HashSet<Report> reports, String id) {
+ try {
+ URI uri = new URI(id);
+ return new URL(uri.toString());
+ } catch (URISyntaxException | MalformedURLException e) {
+ reports.add(Report
+ .buildDereferenceIgnore()
+ .withStatus(HttpStatus.OK)
+ .withValue(id)
+ .withException(e)
+ .build());
+ LOGGER.debug("Invalid enrichment reference found: {}", id);
+ return null;
+ }
+ }
+
+ private static void setDereferenceStatusInReport(String resourceId, HashSet<Report> reports,
+ DereferenceResultStatus resultStatus) {
+ if (!resultStatus.equals(DereferenceResultStatus.SUCCESS)) {
+ String resultMessage;
+ switch (resultStatus) {
+ case ENTITY_FOUND_XML_XSLT_ERROR:
+ resultMessage = "Entity was found, applying the XSLT results in an XML error"
+ .concat("either because the entity is malformed or the XSLT is malformed).");
+ break;
+ case ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS:
+ resultMessage = "Entity was found, but the XSLT mapping did not produce a contextual class.";
+ break;
+ case INVALID_URL:
+ resultMessage = "A URL to be dereferenced is invalid.";
+ break;
+ case NO_VOCABULARY_MATCHING:
+ resultMessage = "Could not find a vocabulary matching the URL.";
+ break;
+ case UNKNOWN_EUROPEANA_ENTITY:
+ resultMessage = "Dereferencing or Coreferencing: the europeana entity does not exist.";
+ break;
+ case NO_ENTITY_FOR_VOCABULARY:
+ resultMessage = "Could not find an entity for a known vocabulary.";
+ break;
+ case FAILURE:
+ resultMessage = "Dereference or Coreferencing failed.";
+ break;
+ default:
+ resultMessage = "";
+ }
+ if (resultStatus.equals(DereferenceResultStatus.FAILURE)) {
+ reports.add(Report.buildDereferenceError()
+ .withValue(resourceId)
+ .withMessage(resultMessage)
+ .build());
+ } else if (resultStatus.equals(DereferenceResultStatus.INVALID_URL) ||
+ resultStatus.equals(DereferenceResultStatus.NO_VOCABULARY_MATCHING) ||
+ resultStatus.equals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS)) {
+ reports.add(Report
+ .buildDereferenceIgnore()
+ .withStatus(HttpStatus.OK)
+ .withValue(resourceId)
+ .withMessage(resultMessage)
+ .build());
+ } else {
+ reports.add(Report
+ .buildDereferenceWarn()
+ .withStatus(HttpStatus.OK)
+ .withValue(resourceId)
+ .withMessage(resultMessage)
+ .build());
+ }
+ }
}
- }
-
- @Override
- public Set<Report> dereference(RDF rdf) {
- // Extract fields from the RDF for dereferencing
- LOGGER.debug(" Extracting fields from RDF for dereferencing...");
- Map<Class<? extends AboutType>, Set<String>> resourceIds = extractReferencesForDereferencing(rdf);
-
- // Get the dereferenced information to add to the RDF using the extracted fields
- LOGGER.debug("Using extracted fields to gather enrichment-via-dereferencing information...");
- List<DereferencedEntities> dereferenceInformation = dereferenceEntities(resourceIds);
- Set<Report> reports = dereferenceInformation.stream()
- .map(DereferencedEntities::getReportMessages)
- .flatMap(Collection::stream)
- .collect(Collectors.toSet());
-
- // Merge the acquired information into the RDF
- LOGGER.debug("Merging Dereference Information...");
- entityMergeEngine.mergeReferenceEntitiesFromDereferencedEntities(rdf, dereferenceInformation);
-
- // Done.
- LOGGER.debug("Dereference completed.");
- return reports;
- }
-
- @Override
- public List<DereferencedEntities> dereferenceEntities(Map<Class<? extends AboutType>, Set<String>> resourceIds) {
-
- // Sanity check.
- if (resourceIds.isEmpty()) {
- return List.of(new DereferencedEntities(Collections.emptyMap(), new HashSet<>()));
+
+ @Override
+ public Set<Report> dereference(RDF rdf) {
+ // Extract fields from the RDF for dereferencing
+ LOGGER.debug("Extracting fields from RDF for dereferencing...");
+ Map<Class<? extends AboutType>, Set<String>> resourceIds = extractReferencesForDereferencing(rdf);
+
+ // Get the dereferenced information to add to the RDF using the extracted fields
+ LOGGER.debug("Using extracted fields to gather enrichment-via-dereferencing information...");
+ List<DereferencedEntities> dereferenceInformation = dereferenceEntities(resourceIds);
+ Set<Report> reports = dereferenceInformation.stream()
+ .map(DereferencedEntities::getReportMessages)
+ .flatMap(Collection::stream)
+ .collect(Collectors.toSet());
+
+ // Merge the acquired information into the RDF
+ LOGGER.debug("Merging Dereference Information...");
+ entityMergeEngine.mergeReferenceEntitiesFromDereferencedEntities(rdf, dereferenceInformation);
+
+ // Done.
+ LOGGER.debug("Dereference completed.");
+ return reports;
}
- //TODO Using TreeMap to sort out the elements because unit tests were failing due to "incorrect" order
- //TODO The order should not matter, the unit tests need fixing
- //TODO There's already ticket MET-5065 to handle it
-
- // First try to get them from our own entity collection database.
- TreeMap<Class<? extends AboutType>, Set<ReferenceTerm>> mappedReferenceTerms = new TreeMap<>(
- Comparator.comparing(Class::getName));
- TreeMap<Class<? extends AboutType>, DereferencedEntities> dereferencedResultEntities = new TreeMap<>(
- Comparator.comparing(Class::getName));
- resourceIds.forEach((key, value) -> {
- HashSet<Report> reports = new HashSet<>();
- Set<ReferenceTerm> referenceTermSet = setUpReferenceTermSet(value, reports);
- mappedReferenceTerms.put(key, referenceTermSet);
- final DereferencedEntities dereferencedOwnEntities = dereferenceOwnEntities(referenceTermSet, reports, key);
- dereferencedResultEntities.put(key, dereferencedOwnEntities);
- });
-
- final Set<String> foundOwnEntityIds = dereferencedResultEntities
- .values().stream()
- .map(DereferencedEntities::getReferenceTermListMap)
- .map(Map::values).flatMap(Collection::stream).flatMap(Collection::stream)
- .map(EnrichmentBase::getAbout)
- .collect(Collectors.toSet());
-
- // For the remaining ones, get them from the dereference service.
- for (Map.Entry<Class<? extends AboutType>, Set<ReferenceTerm>> entry : mappedReferenceTerms.entrySet()) {
- DereferencedEntities dereferencedEntities;
- Set<ReferenceTerm> notFoundOwnReferenceTerms = entry.getValue().stream().filter(
- referenceTerm -> !foundOwnEntityIds.contains(referenceTerm.getReference().toString())).collect(
- Collectors.toSet());
- if (notFoundOwnReferenceTerms.isEmpty()) {
- continue;
- }
-
- if (entry.getKey().equals(Aggregation.class)) {
- dereferencedEntities = dereferenceAggregation(notFoundOwnReferenceTerms, entry.getKey());
- } else {
- dereferencedEntities = dereferenceExternalEntity(notFoundOwnReferenceTerms, entry.getKey());
- }
-
- updateDereferencedEntitiesMap(dereferencedResultEntities, entry.getKey(), dereferencedEntities);
+ @Override
+ public List<DereferencedEntities> dereferenceEntities(Map<Class<? extends AboutType>, Set<String>> resourceIds) {
+
+ // Sanity check.
+ if (resourceIds.isEmpty()) {
+ return List.of(new DereferencedEntities(Collections.emptyMap(), new HashSet<>()));
+ }
+
+ //TODO Using TreeMap to sort out the elements because unit tests were failing due to "incorrect" order
+ //TODO The order should not matter, the unit tests need fixing
+ //TODO There's already ticket MET-5065 to handle it
+
+ // First try to get them from our own entity collection database.
+ TreeMap<Class<? extends AboutType>, Set<ReferenceTerm>> mappedReferenceTerms = new TreeMap<>(
+ Comparator.comparing(Class::getName));
+ TreeMap<Class<? extends AboutType>, DereferencedEntities> dereferencedResultEntities = new TreeMap<>(
+ Comparator.comparing(Class::getName));
+ resourceIds.forEach((key, value) -> {
+ HashSet<Report> reports = new HashSet<>();
+ Set<ReferenceTerm> referenceTermSet = setUpReferenceTermSet(value, reports);
+ mappedReferenceTerms.put(key, referenceTermSet);
+ final DereferencedEntities dereferencedOwnEntities = dereferenceOwnEntities(referenceTermSet, reports, key);
+ dereferencedResultEntities.put(key, dereferencedOwnEntities);
+ });
+
+ final Set<String> foundOwnEntityIds = dereferencedResultEntities
+ .values().stream()
+ .map(DereferencedEntities::getReferenceTermListMap)
+ .map(Map::values).flatMap(Collection::stream).flatMap(Collection::stream)
+ .map(EnrichmentBase::getAbout)
+ .collect(Collectors.toSet());
+
+ // For the remaining ones, get them from the dereference service.
+ for (Map.Entry<Class<? extends AboutType>, Set<ReferenceTerm>> entry : mappedReferenceTerms.entrySet()) {
+ DereferencedEntities dereferencedEntities;
+ Set<ReferenceTerm> notFoundOwnReferenceTerms = entry.getValue().stream().filter(
+ referenceTerm -> !foundOwnEntityIds.contains(referenceTerm.getReference().toString())).collect(
+ Collectors.toSet());
+ if (notFoundOwnReferenceTerms.isEmpty()) {
+ continue;
+ }
+
+ if (entry.getKey().equals(Aggregation.class)) {
+ dereferencedEntities = dereferenceAggregation(notFoundOwnReferenceTerms, entry.getKey());
+ } else {
+ dereferencedEntities = dereferenceExternalEntity(notFoundOwnReferenceTerms, entry.getKey());
+ }
+
+ updateDereferencedEntitiesMap(dereferencedResultEntities, entry.getKey(), dereferencedEntities);
+ }
+ // Done.
+ return new ArrayList<>(dereferencedResultEntities.values());
}
- // Done.
- return new ArrayList<>(dereferencedResultEntities.values());
- }
- private DereferencedEntities dereferenceAggregation(Set<ReferenceTerm> referenceTerms, Class<? extends AboutType> classType) {
+ private DereferencedEntities dereferenceAggregation(Set<ReferenceTerm> referenceTerms, Class<? extends AboutType> classType) {
- DereferencedEntities result = dereferenceEntitiesWithUri(referenceTerms,
- new HashSet<>(), classType);
+ DereferencedEntities result = dereferenceEntitiesWithUri(referenceTerms,
+ new HashSet<>(), classType);
- //Collect references that returned empty lists values for references that we checked with uri
- Set<ReferenceTerm> remainingReferences = referenceTerms.stream().filter(
- referenceTerm -> result.getReferenceTermListMap().get(referenceTerm).isEmpty())
- .collect(Collectors.toSet());
+ //Collect references that returned empty lists values for references that we checked with uri
+ Set<ReferenceTerm> remainingReferences = referenceTerms.stream().filter(
+ referenceTerm -> result.getReferenceTermListMap().get(referenceTerm).isEmpty())
+ .collect(Collectors.toSet());
- //If there are any remaining references then do external dereferencing
- if (CollectionUtils.isNotEmpty(remainingReferences)) {
- DereferencedEntities aggregationRemainingDereferencingResult = dereferenceExternalEntity(remainingReferences, classType);
- result.getReferenceTermListMap().putAll(aggregationRemainingDereferencingResult.getReferenceTermListMap());
- result.getReportMessages().addAll(aggregationRemainingDereferencingResult.getReportMessages());
- }
+ //If there are any remaining references then do external dereferencing
+ if (CollectionUtils.isNotEmpty(remainingReferences)) {
+ DereferencedEntities aggregationRemainingDereferencingResult = dereferenceExternalEntity(remainingReferences, classType);
+ result.getReferenceTermListMap().putAll(aggregationRemainingDereferencingResult.getReferenceTermListMap());
+ result.getReportMessages().addAll(aggregationRemainingDereferencingResult.getReportMessages());
+ }
- return result;
- }
+ return result;
+ }
- @Override
- public Map<Class<? extends AboutType>, Set<String>> extractReferencesForDereferencing(RDF rdf) {
- return DereferenceUtils.extractReferencesForDereferencing(rdf);
- }
+ @Override
+ public Map<Class<? extends AboutType>, Set<String>> extractReferencesForDereferencing(RDF rdf) {
+ return DereferenceUtils.extractReferencesForDereferencing(rdf);
+ }
- private DereferencedEntities dereferenceOwnEntities(Set<ReferenceTerm> resourceIds,
- HashSet<Report> reports,
- Class<? extends AboutType> classType) {
- if (entityResolver == null) {
- return new DereferencedEntities(Collections.emptyMap(), new HashSet<>());
+ public DereferencedEntities dereferenceOwnEntities(Set<ReferenceTerm> resourceIds,
+ HashSet<Report> reports,
+ Class<? extends AboutType> classType) {
+ if (entityResolver == null) {
+ return new DereferencedEntities(Collections.emptyMap(), new HashSet<>());
+ }
+ try {
+ Map<ReferenceTerm, List<EnrichmentBase>> result = new HashMap<>();
+ Set<ReferenceTerm> ownEntities = resourceIds.stream()
+ .filter(id -> EntityResolver.europeanaLinkPattern.matcher(id.getReference().toString()).matches())
+ .collect(Collectors.toSet());
+ entityResolver.resolveById(ownEntities)
+ .forEach((key, value) -> result.put(key, List.of(value)));
+ ownEntities.stream().filter(id -> result.get(id) == null || result.get(id).isEmpty())
+ .forEach(notFoundOwnId -> {
+ setDereferenceStatusInReport(notFoundOwnId.getReference().toString(),
+ reports, DereferenceResultStatus.UNKNOWN_EUROPEANA_ENTITY);
+ result.putIfAbsent(notFoundOwnId, Collections.emptyList());
+ });
+ return new DereferencedEntities(result, reports, classType);
+ } catch (Exception e) {
+ return handleDereferencingException(resourceIds, reports, e, classType);
+ }
}
- try {
- Map<ReferenceTerm, List<EnrichmentBase>> result = new HashMap<>();
- entityResolver.resolveById(resourceIds).forEach((key, value) -> result.put(key, List.of(value)));
- return new DereferencedEntities(result, reports, classType);
- } catch (Exception e) {
- return handleDereferencingException(resourceIds, reports, e, classType);
+
+ private DereferencedEntities dereferenceEntitiesWithUri(Set<ReferenceTerm> resourceIds,
+ HashSet<Report> reports,
+ Class<? extends AboutType> classType) {
+ if (entityResolver == null) {
+ return new DereferencedEntities(Collections.emptyMap(), new HashSet<>());
+ }
+ try {
+ return new DereferencedEntities(new HashMap<>(entityResolver.resolveByUri(resourceIds)), reports, classType);
+ } catch (Exception e) {
+ return handleDereferencingException(resourceIds, reports, e, classType);
+ }
}
- }
- private DereferencedEntities dereferenceEntitiesWithUri(Set<ReferenceTerm> resourceIds,
- HashSet<Report> reports,
- Class<? extends AboutType> classType) {
- if (entityResolver == null) {
- return new DereferencedEntities(Collections.emptyMap(), new HashSet<>());
+ private DereferencedEntities dereferenceExternalEntity(Set<ReferenceTerm> referenceTerms,
+ Class<? extends AboutType> classType) {
+ HashSet<Report> reports = new HashSet<>();
+ // Check that there is something to do.
+ if (dereferenceClient == null) {
+ return new DereferencedEntities(Collections.emptyMap(), reports, classType);
+ }
+
+ // Perform the dereferencing.
+ EnrichmentResultList result;
+ Map<ReferenceTerm, List<EnrichmentBase>> resultMap = new HashMap<>();
+ for (ReferenceTerm referenceTerm : referenceTerms) {
+ String resourceId = referenceTerm.getReference().toString();
+ try {
+ LOGGER.debug("Dereference external entity processing {}", resourceId);
+ result = retryableExternalRequestForNetworkExceptions(
+ () -> dereferenceClient.dereference(resourceId));
+ DereferenceResultStatus resultStatus = Optional.ofNullable(result)
+ .map(EnrichmentResultList::getEnrichmentBaseResultWrapperList)
+ .orElseGet(Collections::emptyList).stream()
+ .map(EnrichmentResultBaseWrapper::getDereferenceStatus)
+ .filter(Objects::nonNull).findFirst()
+ .orElse(DereferenceResultStatus.FAILURE);
+
+ setDereferenceStatusInReport(resourceId, reports, resultStatus);
+ } catch (BadRequest e) {
+ // We are forgiving for these errors
+ LOGGER.warn("ResourceId {}, failed", resourceId, e);
+ reports.add(Report
+ .buildDereferenceWarn()
+ .withStatus(HttpStatus.BAD_REQUEST)
+ .withValue(resourceId)
+ .withException(e)
+ .build());
+ result = null;
+ } catch (Exception e) {
+ DereferenceException dereferenceException = new DereferenceException(
+ "Exception occurred while trying to perform dereferencing.", e);
+ reports.add(Report
+ .buildDereferenceError()
+ .withValue(resourceId)
+ .withException(dereferenceException)
+ .build());
+ result = null;
+ }
+ resultMap.put(referenceTerm, Optional.ofNullable(result).map(EnrichmentResultList::getEnrichmentBaseResultWrapperList)
+ .orElseGet(Collections::emptyList).stream()
+ .map(EnrichmentResultBaseWrapper::getEnrichmentBaseList).filter(Objects::nonNull)
+ .flatMap(List::stream).collect(Collectors.toList()));
+ }
+
+ // Return the result.
+ return new DereferencedEntities(resultMap, reports, classType);
}
- try {
- return new DereferencedEntities(new HashMap<>(entityResolver.resolveByUri(resourceIds)), reports, classType);
- } catch (Exception e) {
- return handleDereferencingException(resourceIds, reports, e, classType);
+
+ private Set<ReferenceTerm> setUpReferenceTermSet(Set<String> resourcesIds, HashSet<Report> reports) {
+ return resourcesIds.stream()
+ .map(id -> checkIfUrlIsValid(reports, id))
+ .filter(Objects::nonNull)
+ .map(validateUrl -> new ReferenceTermImpl(validateUrl, new HashSet<>()))
+ .collect(Collectors.toSet());
}
- }
-
- private DereferencedEntities dereferenceExternalEntity(Set<ReferenceTerm> referenceTerms,
- Class<? extends AboutType> classType) {
- HashSet<Report> reports = new HashSet<>();
- // Check that there is something to do.
- if (dereferenceClient == null) {
- return new DereferencedEntities(Collections.emptyMap(), reports, classType);
+
+ private void updateDereferencedEntitiesMap(TreeMap<Class<? extends AboutType>, DereferencedEntities> mapToUpdate,
+ Class<? extends AboutType> classType,
+ DereferencedEntities elementToUpdateWith) {
+
+ DereferencedEntities foundEntities = mapToUpdate.get(classType);
+ foundEntities.getReferenceTermListMap().putAll(elementToUpdateWith.getReferenceTermListMap());
+ foundEntities.getReportMessages().addAll(elementToUpdateWith.getReportMessages());
+
}
- // Perform the dereferencing.
- EnrichmentResultList result;
- Map<ReferenceTerm, List<EnrichmentBase>> resultMap = new HashMap<>();
- for (ReferenceTerm referenceTerm : referenceTerms) {
- String resourceId = referenceTerm.getReference().toString();
- try {
- LOGGER.debug("== Processing {}", resourceId);
- result = retryableExternalRequestForNetworkExceptions(
- () -> dereferenceClient.dereference(resourceId));
- DereferenceResultStatus resultStatus = Optional.ofNullable(result)
- .map(EnrichmentResultList::getEnrichmentBaseResultWrapperList)
- .orElseGet(Collections::emptyList).stream()
- .map(EnrichmentResultBaseWrapper::getDereferenceStatus)
- .filter(Objects::nonNull).findFirst()
- .orElse(DereferenceResultStatus.UNKNOWN_ENTITY);
-
- setDereferenceStatusInReport(resourceId, reports, resultStatus);
- } catch (BadRequest e) {
- // We are forgiving for these errors
- LOGGER.warn("ResourceId {}, failed", resourceId, e);
- reports.add(Report
- .buildDereferenceWarn()
- .withStatus(HttpStatus.BAD_REQUEST)
- .withValue(resourceId)
- .withException(e)
- .build());
- result = null;
- } catch (Exception e) {
+ private DereferencedEntities handleDereferencingException(Set<ReferenceTerm> resourceIds, HashSet<Report> reports,
+ Exception exception, Class<? extends AboutType> classType) {
DereferenceException dereferenceException = new DereferenceException(
- "Exception occurred while trying to perform dereferencing.", e);
+ "Exception occurred while trying to perform dereferencing.", exception);
reports.add(Report
- .buildDereferenceWarn()
- .withStatus(HttpStatus.OK)
- .withValue(resourceId)
- .withException(dereferenceException)
- .build());
- result = null;
- }
- resultMap.put(referenceTerm, Optional.ofNullable(result).map(EnrichmentResultList::getEnrichmentBaseResultWrapperList)
- .orElseGet(Collections::emptyList).stream()
- .map(EnrichmentResultBaseWrapper::getEnrichmentBaseList).filter(Objects::nonNull)
- .flatMap(List::stream).collect(Collectors.toList()));
+ .buildDereferenceWarn()
+ .withStatus(HttpStatus.OK)
+ .withValue(resourceIds.stream()
+ .map(resourceId -> resourceId.getReference().toString())
+ .collect(Collectors.joining(",")))
+ .withException(dereferenceException)
+ .build());
+ return new DereferencedEntities(new HashMap<>(), reports, classType);
}
-
- // Return the result.
- return new DereferencedEntities(resultMap, reports, classType);
- }
-
- private Set<ReferenceTerm> setUpReferenceTermSet(Set<String> resourcesIds, HashSet<Report> reports) {
- return resourcesIds.stream()
- .map(id -> checkIfUrlIsValid(reports, id))
- .filter(Objects::nonNull)
- .map(validateUrl -> new ReferenceTermImpl(validateUrl, new HashSet<>()))
- .collect(Collectors.toSet());
- }
-
- private void updateDereferencedEntitiesMap(TreeMap<Class<? extends AboutType>, DereferencedEntities> mapToUpdate,
- Class<? extends AboutType> classType,
- DereferencedEntities elementToUpdateWith) {
-
- DereferencedEntities foundEntities = mapToUpdate.get(classType);
- foundEntities.getReferenceTermListMap().putAll(elementToUpdateWith.getReferenceTermListMap());
- foundEntities.getReportMessages().addAll(elementToUpdateWith.getReportMessages());
-
- }
-
- private DereferencedEntities handleDereferencingException(Set<ReferenceTerm> resourceIds, HashSet<Report> reports,
- Exception exception, Class<? extends AboutType> classType) {
- DereferenceException dereferenceException = new DereferenceException(
- "Exception occurred while trying to perform dereferencing.", exception);
- reports.add(Report
- .buildDereferenceWarn()
- .withStatus(HttpStatus.OK)
- .withValue(resourceIds.stream()
- .map(resourceId -> resourceId.getReference().toString())
- .collect(Collectors.joining(",")))
- .withException(dereferenceException)
- .build());
- return new DereferencedEntities(new HashMap<>(), reports, classType);
- }
}
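
Reviewer note on the dereferencer change above: the old per-resource try/catch (one warn report per failing resource id) is replaced by handleDereferencingException(...), which aggregates every failing reference into a single warn report with a comma-joined value. A minimal sketch of that aggregation, using only the builder calls visible in this diff (the method name buildAggregatedWarn and its arguments are illustrative):

    import eu.europeana.enrichment.api.internal.ReferenceTerm;
    import eu.europeana.enrichment.rest.client.report.Report;
    import java.util.Set;
    import java.util.stream.Collectors;
    import org.springframework.http.HttpStatus;

    static Report buildAggregatedWarn(Set<ReferenceTerm> references, Exception cause) {
      // One report for the whole batch: values are joined, the cause is attached once.
      return Report.buildDereferenceWarn()
          .withStatus(HttpStatus.OK)
          .withValue(references.stream()
              .map(reference -> reference.getReference().toString())
              .collect(Collectors.joining(",")))
          .withException(cause)
          .build();
    }
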
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParser.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParser.java
index 405cfd9dcf..8cdac94f3a 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParser.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParser.java
@@ -47,11 +47,14 @@ public Set<SearchTermContext> parseSearchTerms(RDF rdf) {
//Proxy search terms
final Set<SearchTermContext> resultSearchTermsSet = getFieldValueSet(ProxyFieldType.values(),
RdfEntityUtils.getProviderProxies(rdf));
- resultSearchTermsSet
- .addAll(getFieldValueSet(AggregationFieldType.values(), rdf.getAggregationList()));
+ resultSearchTermsSet.addAll(getAggregationSearchTerms(rdf));
return resultSearchTermsSet;
}
+ public Set<SearchTermContext> getAggregationSearchTerms(RDF rdf) {
+ return getFieldValueSet(AggregationFieldType.values(), rdf.getAggregationList());
+ }
+
private Set<SearchTermContext> getFieldValueSet(FieldType<? extends AboutType>[] fieldTypes,
List<? extends AboutType> aboutTypes) {
final Map<FieldValue, Set<FieldType<? extends AboutType>>> fieldValueFieldTypesMap = new HashMap<>();
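
The extracted getAggregationSearchTerms(RDF) makes the aggregation-derived terms reachable on their own, while parseSearchTerms(RDF) keeps returning the proxy terms plus these aggregation terms. A usage sketch (the SearchTermContext element type follows the reconstruction above and is an assumption; the empty aggregation list only keeps the snippet self-contained):

    import eu.europeana.enrichment.api.internal.SearchTermContext;
    import eu.europeana.metis.schema.jibx.RDF;
    import java.util.ArrayList;
    import java.util.Set;

    RDF rdf = new RDF();
    rdf.setAggregationList(new ArrayList<>()); // a real record would carry parsed aggregations
    MetisRecordParser parser = new MetisRecordParser();
    // Terms taken from ore:Aggregation fields only, without the provider-proxy terms.
    Set<SearchTermContext> aggregationTerms = parser.getAggregationSearchTerms(rdf);
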
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/report/Report.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/report/Report.java
index 00502ec72e..121d25dcbf 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/report/Report.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/report/Report.java
@@ -106,7 +106,7 @@ public static Report buildEnrichmentWarn() {
}
/**
- * Create a report message for enrichment with error status and error type
+ * Create a report message for enrichment with error type
*
* @return a reference to this Builder
*/
@@ -135,6 +135,16 @@ public static Report buildDereferenceWarn() {
.withMessageType(Type.WARN);
}
+ /**
+ * Create a report message for dereference with error type
+ *
+ * @return a reference to this Builder
+ */
+ public static Report buildDereferenceError() {
+ return new Report().withMode(Mode.DEREFERENCE)
+ .withMessageType(Type.ERROR);
+ }
+
/**
* Sets the {@code status} and returns a reference to this Builder enabling method chaining.
*
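
buildDereferenceError() mirrors buildDereferenceWarn() but with message type ERROR, which the reworked tests treat as fatal (RecordStatus.STOP). A hedged usage sketch reusing only builder calls that appear elsewhere in this diff (status, value, and exception are illustrative):

    Report error = Report.buildDereferenceError()
        .withStatus(HttpStatus.BAD_REQUEST) // any HttpStatus fits; BAD_REQUEST is an example
        .withValue("http://example.host/entity/1") // the offending resource id
        .withException(new IllegalStateException("dereference failed")) // hypothetical cause
        .build();
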
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java
index cdbb416946..2b75ef29c3 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java
@@ -11,34 +11,25 @@
import eu.europeana.enrichment.api.external.model.Part;
import eu.europeana.enrichment.api.external.model.Place;
import eu.europeana.enrichment.api.external.model.TimeSpan;
-
import eu.europeana.enrichment.api.internal.AggregationFieldType;
+import eu.europeana.enrichment.api.internal.FieldType;
import eu.europeana.enrichment.api.internal.ProxyFieldType;
import eu.europeana.enrichment.api.internal.ReferenceTerm;
import eu.europeana.enrichment.api.internal.ReferenceTermContext;
import eu.europeana.enrichment.api.internal.SearchTermContext;
import eu.europeana.enrichment.rest.client.dereference.DereferencedEntities;
-
+import eu.europeana.metis.schema.jibx.AboutType;
import eu.europeana.metis.schema.jibx.AgentType;
import eu.europeana.metis.schema.jibx.Aggregation;
import eu.europeana.metis.schema.jibx.Alt;
-import eu.europeana.metis.schema.jibx.Concept.Choice;
-import eu.europeana.metis.schema.jibx.Date;
-
-import eu.europeana.metis.schema.jibx.Lat;
-import eu.europeana.metis.schema.jibx.PlaceType;
import eu.europeana.metis.schema.jibx.AltLabel;
-import eu.europeana.metis.schema.jibx.HasPart;
-import eu.europeana.metis.schema.jibx.IsPartOf;
-import eu.europeana.metis.schema.jibx._Long;
-import eu.europeana.metis.schema.jibx.Note;
-import eu.europeana.metis.schema.jibx.PrefLabel;
-import eu.europeana.metis.schema.jibx.SameAs;
import eu.europeana.metis.schema.jibx.Begin;
import eu.europeana.metis.schema.jibx.BiographicalInformation;
-import eu.europeana.metis.schema.jibx.ProfessionOrOccupation;
-import eu.europeana.metis.schema.jibx.PlaceOfBirth;
-import eu.europeana.metis.schema.jibx.PlaceOfDeath;
+import eu.europeana.metis.schema.jibx.BroadMatch;
+import eu.europeana.metis.schema.jibx.Broader;
+import eu.europeana.metis.schema.jibx.CloseMatch;
+import eu.europeana.metis.schema.jibx.Concept.Choice;
+import eu.europeana.metis.schema.jibx.Date;
import eu.europeana.metis.schema.jibx.DateOfBirth;
import eu.europeana.metis.schema.jibx.DateOfDeath;
import eu.europeana.metis.schema.jibx.DateOfEstablishment;
@@ -47,24 +38,29 @@
import eu.europeana.metis.schema.jibx.ExactMatch;
import eu.europeana.metis.schema.jibx.Gender;
import eu.europeana.metis.schema.jibx.HasMet;
+import eu.europeana.metis.schema.jibx.HasPart;
import eu.europeana.metis.schema.jibx.HiddenLabel;
import eu.europeana.metis.schema.jibx.Identifier;
import eu.europeana.metis.schema.jibx.InScheme;
import eu.europeana.metis.schema.jibx.IsNextInSequence;
+import eu.europeana.metis.schema.jibx.IsPartOf;
import eu.europeana.metis.schema.jibx.IsRelatedTo;
+import eu.europeana.metis.schema.jibx.Lat;
import eu.europeana.metis.schema.jibx.NarrowMatch;
import eu.europeana.metis.schema.jibx.Narrower;
import eu.europeana.metis.schema.jibx.Notation;
+import eu.europeana.metis.schema.jibx.Note;
+import eu.europeana.metis.schema.jibx.PlaceOfBirth;
+import eu.europeana.metis.schema.jibx.PlaceOfDeath;
+import eu.europeana.metis.schema.jibx.PlaceType;
+import eu.europeana.metis.schema.jibx.PrefLabel;
+import eu.europeana.metis.schema.jibx.ProfessionOrOccupation;
import eu.europeana.metis.schema.jibx.RDF;
import eu.europeana.metis.schema.jibx.Related;
import eu.europeana.metis.schema.jibx.RelatedMatch;
+import eu.europeana.metis.schema.jibx.SameAs;
import eu.europeana.metis.schema.jibx.TimeSpanType;
-import eu.europeana.enrichment.api.internal.FieldType;
-import eu.europeana.metis.schema.jibx.AboutType;
-import eu.europeana.metis.schema.jibx.BroadMatch;
-import eu.europeana.metis.schema.jibx.Broader;
-import eu.europeana.metis.schema.jibx.CloseMatch;
-
+import eu.europeana.metis.schema.jibx._Long;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -379,7 +375,7 @@ private static T convertAndAddEn
return convertedEntity;
}
- private static AboutType convertAndAddEntity(RDF rdf, EnrichmentBase enrichmentBase) {
+ public static AboutType convertAndAddEntity(RDF rdf, EnrichmentBase enrichmentBase) {
// Convert the entity and add it to the RDF.
final AboutType entity;
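
Making convertAndAddEntity(RDF, EnrichmentBase) public lets callers outside EntityMergeEngine convert one dereferenced entity and attach it to a record in a single step. A minimal sketch (Place is one of the EnrichmentBase subtypes imported above; the about value is illustrative):

    import eu.europeana.enrichment.api.external.model.Place;
    import eu.europeana.metis.schema.jibx.AboutType;
    import eu.europeana.metis.schema.jibx.RDF;

    RDF rdf = new RDF();
    Place place = new Place();
    place.setAbout("http://example.host/place/1"); // hypothetical identifier
    // Converts the enrichment model entity to its jibx counterpart and adds it to the RDF.
    AboutType added = EntityMergeEngine.convertAndAddEntity(rdf, place);
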
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java
index 6bd952bf58..954ac69f71 100644
--- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java
+++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java
@@ -166,7 +166,7 @@ public static List<ProxyType> getProviderProxies(RDF rdf) {
.collect(Collectors.toList());
}
- private static boolean isEuropeanaProxy(ProxyType proxy) {
+ public static boolean isEuropeanaProxy(ProxyType proxy) {
return proxy.getEuropeanaProxy() != null && proxy.getEuropeanaProxy().isEuropeanaProxy();
}
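
With isEuropeanaProxy(ProxyType) now public, callers that already hold a proxy list can separate Europeana proxies from provider proxies without re-parsing the record. A sketch (rdf is assumed to be a parsed record whose proxy list is populated):

    import eu.europeana.metis.schema.jibx.ProxyType;
    import java.util.List;
    import java.util.stream.Collectors;

    List<ProxyType> providerProxies = rdf.getProxyList().stream()
        .filter(proxy -> !RdfEntityUtils.isEuropeanaProxy(proxy))
        .collect(Collectors.toList());
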
diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java
index b1e163406f..d0d06a1945 100644
--- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java
+++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java
@@ -1,21 +1,5 @@
package eu.europeana.enrichment.rest.client;
-import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
-import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
-import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.common.ConsoleNotifier;
import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer;
@@ -34,10 +18,6 @@
import eu.europeana.enrichment.rest.client.report.Type;
import eu.europeana.metis.schema.convert.SerializationException;
import eu.europeana.metis.schema.jibx.RDF;
-import java.io.IOException;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.stream.Stream;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -48,443 +28,563 @@
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
+import java.io.IOException;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.stream.Stream;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
class EnrichmentWorkerImplTest {
- private static final Logger LOGGER = LoggerFactory.getLogger(EnrichmentWorkerImplTest.class);
- private static WireMockServer wireMockServer;
-
- @BeforeAll
- static void createWireMock() {
- wireMockServer = new WireMockServer(wireMockConfig()
- .dynamicPort()
- .enableBrowserProxying(true)
- .notifier(new ConsoleNotifier(true)));
- wireMockServer.start();
- JvmProxyConfigurer.configureFor(wireMockServer);
- }
-
- @AfterAll
- static void tearDownWireMock() {
- wireMockServer.stop();
- }
-
- private static Stream<Arguments> providedInputRecords() {
- return Stream.of(
- Arguments.of(getResourceFileContent("enrichment/sample_enrichment_exception.rdf"), RecordStatus.STOP),
- Arguments.of(getResourceFileContent("enrichment/sample_dereference_not_found.rdf"), RecordStatus.CONTINUE),
- Arguments.of(getResourceFileContent("enrichment/sample_dereference_redirect.rdf"), RecordStatus.CONTINUE),
- Arguments.of(getResourceFileContent("enrichment/sample_enrichment_noentity.rdf"), RecordStatus.CONTINUE),
- Arguments.of(getResourceFileContent("enrichment/sample_enrichment_success.rdf"), RecordStatus.CONTINUE)
- );
- }
-
- private static String getResourceFileContent(String fileName) {
- try {
- return new String(
- EnrichmentWorkerImplTest.class.getClassLoader().getResourceAsStream(fileName).readAllBytes()
- );
- } catch (IOException ioException) {
- return "";
+ private static final Logger LOGGER = LoggerFactory.getLogger(EnrichmentWorkerImplTest.class);
+ private static WireMockServer wireMockServer;
+
+ @BeforeAll
+ static void createWireMock() {
+ wireMockServer = new WireMockServer(wireMockConfig()
+ .dynamicPort()
+ .enableBrowserProxying(true)
+ .notifier(new ConsoleNotifier(true)));
+ wireMockServer.start();
+ JvmProxyConfigurer.configureFor(wireMockServer);
+ }
+
+ @AfterAll
+ static void tearDownWireMock() {
+ wireMockServer.stop();
}
- }
-
- @ParameterizedTest
- @MethodSource("providedInputRecords")
- void testEnrichmentWorkerHappyFlow(String inputRecord, RecordStatus recordStatus)
- throws DereferenceException, EnrichmentException {
- setDereferenceMocks();
- setEntityAPIMocks();
-
- TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
- modeSetWithBoth.add(Mode.ENRICHMENT);
- modeSetWithBoth.add(Mode.DEREFERENCE);
-
- EnricherProvider enricherProvider = new EnricherProvider();
- enricherProvider.setEnrichmentPropertiesValues("http://localhost:" + wireMockServer.port() + "/entitymgmt",
- "http://localhost:" + wireMockServer.port() + "/entity",
- "api2demo");
-
- final Enricher enricher = enricherProvider.create();
-
- DereferencerProvider dereferencerProvider = new DereferencerProvider();
- dereferencerProvider.setEnrichmentPropertiesValues("http://entity-api.mock/entity",
- "http://entity-api.mock/entity",
- "api2demo");
-
- dereferencerProvider.setDereferenceUrl("http://dereference-rest.mock");
- final Dereferencer dereferencer = dereferencerProvider.create();
-
- // Execute the worker
- final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(dereferencer, enricher);
-
- ProcessedResult<String> output = worker.process(inputRecord, modeSetWithBoth);
-
- LOGGER.info("REPORT: {}\n\n", output.getReport());
- LOGGER.info("RECORD: {}\n\n", output.getProcessedRecord());
- LOGGER.info("STATUS: {}", output.getRecordStatus());
- assertEquals(recordStatus, output.getRecordStatus());
- }
-
- private static void setEntityAPIMocks() {
- wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/concept/base/84?wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept-base.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Religion&lang=en&type=concept"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Piotras%20Kalabuchovas&type=agent"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-agent-nomatch.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Paranguaricutirimicuaro&lang=en&type=concept"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept-nomatch.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(
- get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Lietuvos%20Centrinis%20Valstyb%C3%A9s%20Archyvas&type=organization"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-organization.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(
- get(urlEqualTo("/entity/enrich?wskey=api2demo&text=EFG%20-%20The%20European%20Film%20Gateway&lang=en&type=organization"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-organization.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/organization/base/1482250000004671158?wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-organization-base.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=1957&lang=en&type=timespan"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-timespan.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo(
- "/entity/resolve?uri=http://dbpedia.org/resource/Lithuanian_Soviet_Socialist_Republic_%25281918%25E2%2580%25931919%2529&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-nomatch.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/resolve?uri=https://sws.geonames.org/597427/&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept.json"))
- .withStatus(HttpStatus.OK.value())));
-
- wireMockServer.stubFor(get(urlEqualTo("/entity/resolve?uri=http://vocab.getty.edu/aat/300136900&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entity/resolve?uri=http://dbpedia.org/resource/Lithuania&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-place.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Muziek&lang=nl&type=concept"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept-ii.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/concept/base/62?wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept-ii-base.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entity/enrich?wskey=api2demo&text=G%C3%BCiro&lang=es&type=concept"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-concept-iii.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entity/resolve?uri=http://www.mimo-db.eu/InstrumentsKeywords/0&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept-ii.json"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/entity/resolve?uri=http://www.mimo-db.eu/InstrumentsKeywords/3052&wskey=api2demo"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/json")
- .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept-ii.json"))
- .withStatus(HttpStatus.OK.value())));
- }
-
- private static void setDereferenceMocks() {
- wireMockServer.stubFor(get(urlEqualTo("/dereference?uri=https%3A%2F%2Fsws.geonames.org%2F597427%2F"))
- .withHost(equalTo("dereference-rest.mock"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/xml")
- .withBody(getResourceFileContent("dereference/dereference-geoname.xml"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/dereference?uri=http%3A%2F%2Fvocab.getty.edu%2Faat%2F300136900"))
- .withHost(equalTo("dereference-rest.mock"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/xml")
- .withBody(getResourceFileContent("dereference/dereference-vocabulary.xml"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/dereference?uri=http%3A%2F%2Fwww.mimo-db.eu%2FInstrumentsKeywords%2F3052"))
- .withHost(equalTo("dereference-rest.mock"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/xml")
- .withBody(getResourceFileContent("dereference/dereference-normal.xml"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/dereference?uri=http%3A%2F%2Fwww.mimo-db.eu%2FInstrumentsKeywords%2F0"))
- .withHost(equalTo("dereference-rest.mock"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/xml")
- .withBody(getResourceFileContent("dereference/dereference-no-entity.xml"))
- .withStatus(HttpStatus.OK.value())));
- wireMockServer.stubFor(get(urlEqualTo("/dereference?uri=http%3A%2F%2Fsemantics.gr%2Fauthorities%2Fthematic_tags%2F994210004"))
- .withHost(equalTo("dereference-rest.mock"))
- .willReturn(aResponse()
- .withHeader("Content-Type", "application/xml")
- .withBody(getResourceFileContent("dereference/dereference-normal-redirect.xml"))
- .withStatus(HttpStatus.OK.value())));
- }
-
- @Test
- void testEnrichmentWorkerHappyFlow() throws DereferenceException, EnrichmentException {
- TreeSet<Mode> modeSetWithOnlyEnrichment = new TreeSet<>();
- TreeSet<Mode> modeSetWithOnlyDereference = new TreeSet<>();
- TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
-
- modeSetWithOnlyEnrichment.add(Mode.ENRICHMENT);
- testEnrichmentWorkerHappyFlow(modeSetWithOnlyEnrichment);
- modeSetWithOnlyDereference.add(Mode.DEREFERENCE);
- testEnrichmentWorkerHappyFlow(modeSetWithOnlyDereference);
- modeSetWithBoth.add(Mode.ENRICHMENT);
- modeSetWithBoth.add(Mode.DEREFERENCE);
- testEnrichmentWorkerHappyFlow(modeSetWithBoth);
- }
-
- @Test
- void testEnrichmentWorkerNullFlow() throws DereferenceException, EnrichmentException {
- TreeSet<Mode> modeSetWithOnlyEnrichment = new TreeSet<>();
- TreeSet<Mode> modeSetWithOnlyDereference = new TreeSet<>();
- TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
-
- modeSetWithOnlyEnrichment.add(Mode.ENRICHMENT);
- testEnrichmentWorkerNullFlow(modeSetWithOnlyEnrichment);
- modeSetWithOnlyDereference.add(Mode.DEREFERENCE);
- testEnrichmentWorkerNullFlow(modeSetWithOnlyDereference);
- modeSetWithBoth.add(Mode.ENRICHMENT);
- modeSetWithBoth.add(Mode.DEREFERENCE);
- testEnrichmentWorkerNullFlow(modeSetWithBoth);
- }
-
- private void testEnrichmentWorkerHappyFlow(Set<Mode> modes)
- throws DereferenceException, EnrichmentException {
-
- // Create enricher and mock it.
- final Enricher enricher = mock(EnricherImpl.class);
-
- final Dereferencer dereferencer = mock(DereferencerImpl.class);
-
- // Execute the worker
- final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(dereferencer, enricher);
- final RDF inputRdf = new RDF();
- worker.process(inputRdf, modes);
-
- // Counters of method calls depend on the mode
- final boolean doDereferencing = modes.contains(Mode.DEREFERENCE);
- final boolean doEnrichment = modes.contains(Mode.ENRICHMENT);
-
- // Check the performed tasks
- verifyDereferencingHappyFlow(doDereferencing, dereferencer, inputRdf);
- verifyEnrichmentHappyFlow(doEnrichment, enricher, inputRdf);
- // verifyMergeHappyFlow(doEnrichment, doDereferencing, entityMergeEngine);
- }
-
- private void testEnrichmentWorkerNullFlow(Set<Mode> modes)
- throws DereferenceException, EnrichmentException {
-
- // Create enrichment worker and mock the enrichment and dereferencing results.
- final Enricher enricher = mock(EnricherImpl.class);
-
- final Dereferencer dereferencer = mock(DereferencerImpl.class);
-
- // Execute the worker
- final EnrichmentWorkerImpl worker =
- spy(new EnrichmentWorkerImpl(dereferencer, enricher));
- final RDF inputRdf = new RDF();
- worker.process(inputRdf, modes);
-
- // Counters of method calls depend on the mode
- final boolean doDereferencing = modes.contains(Mode.DEREFERENCE);
- final boolean doEnrichment = modes.contains(Mode.ENRICHMENT);
-
- // Check the performed tasks
- verifyDereferencingNullFlow(doDereferencing, dereferencer, inputRdf);
- verifyEnrichmentNullFlow(doEnrichment, enricher, inputRdf);
-
- }
-
- // Verify dereference related calls
- private void verifyDereferencingHappyFlow(boolean doDereferencing, Dereferencer dereferencer,
- RDF inputRdf) throws DereferenceException {
- if (doDereferencing) {
- verify(dereferencer, times(1)).dereference(inputRdf);
-
- } else {
- verify(dereferencer, never()).dereference(any());
+
+ private static Stream<Arguments> providedInputRecords() {
+ return Stream.of(
+ Arguments.of(getResourceFileContent("enrichment/sample_enrichment_exception.rdf"), RecordStatus.STOP),
+ Arguments.of(getResourceFileContent("enrichment/sample_dereference_not_found.rdf"), RecordStatus.STOP),
+ Arguments.of(getResourceFileContent("enrichment/sample_dereference_redirect.rdf"), RecordStatus.CONTINUE),
+ Arguments.of(getResourceFileContent("enrichment/sample_enrichment_noentity.rdf"), RecordStatus.CONTINUE),
+ Arguments.of(getResourceFileContent("enrichment/sample_enrichment_failure.rdf"), RecordStatus.STOP),
+ Arguments.of(getResourceFileContent("enrichment/sample_enrichment_success.rdf"), RecordStatus.CONTINUE)
+ );
}
- }
- private void verifyDereferencingNullFlow(boolean doDereferencing, Dereferencer dereferencer,
- RDF inputRdf) throws DereferenceException {
- if (doDereferencing) {
+ private static String getResourceFileContent(String fileName) {
+ try {
+ return new String(
+ EnrichmentWorkerImplTest.class.getClassLoader().getResourceAsStream(fileName).readAllBytes()
+ );
+ } catch (IOException ioException) {
+ return "";
+ }
+ }
- verify(dereferencer, times(1)).dereference(inputRdf);
+ private static void setEntityAPIMocks() {
+ wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/concept/base/84?wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-mgmt-concept-base.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/concept/base/3401?wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-mgmt-concept-base-ii.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Religion&lang=en&type=concept"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Piotras%20Kalabuchovas&type=agent"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-agent-nomatch.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/entity/enrich?wskey=api2demo&text=Paranguaricutirimicuaro&lang=en&type=concept"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept-nomatch.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo(
+ "/entity/enrich?wskey=api2demo&text=Lietuvos%20Centrinis%20Valstyb%C3%A9s%20Archyvas&type=organization"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-organization.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo(
+ "/entity/enrich?wskey=api2demo&text=EFG%20-%20The%20European%20Film%20Gateway&lang=en&type=organization"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-organization.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entitymgmt/organization/base/1482250000004671158?wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-mgmt-organization-base.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/enrich?wskey=api2demo&text=1957&lang=en&type=timespan"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-timespan.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/entity/resolve?uri=http://dbpedia.org/resource/Lithuanian_Soviet_Socialist_Republic_%25281918%25E2%2580%25931919%2529&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-nomatch.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=https://sws.geonames.org/597427/&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept.json"))
+ .withStatus(HttpStatus.OK.value())));
+
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://vocab.getty.edu/aat/300136900&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://vocab.getty.edu/aat/300008372&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-nomatch.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://vocab.getty.edu/aat/300000810&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-nomatch.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://data.europeana.eu/concept/3401&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept-iv.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/entity/resolve?uri=http://data.europeana.eu/concept/XXXXXXXXX&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept-notfound.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://dbpedia.org/resource/Lithuania&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-place.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/enrich?wskey=api2demo&text=Muziek&lang=nl&type=concept"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept-ii.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo("/entitymgmt/concept/base/62?wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(
+ getResourceFileContent("entity-api/entity-api-response-mgmt-concept-ii-base.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/enrich?wskey=api2demo&text=G%C3%BCiro&lang=es&type=concept"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-concept-iii.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/entity/resolve?uri=http://www.mimo-db.eu/InstrumentsKeywords/0&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept-ii.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/entity/resolve?uri=http://www.mimo-db.eu/InstrumentsKeywords/3052&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept-ii.json"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/entity/resolve?uri=http://vocab.getty.edu/aat/400136800&wskey=api2demo"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/json")
+ .withBody(getResourceFileContent("entity-api/entity-api-response-resolve-uri-concept.json"))
+ .withStatus(HttpStatus.OK.value())));
+ }
- } else {
- verify(dereferencer, never()).dereference(any());
+ private static void setDereferenceMocks() {
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=https%3A%2F%2Fsws.geonames.org%2F597427%2F"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-geoname.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=https%3A%2F%2Fsws.geonames.org%2F597427%2F1"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withStatus(HttpStatus.NOT_FOUND.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fvocab.getty.edu%2Faat%2F300136900"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-vocabulary.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fvocab.getty.edu%2Faat%2F300008372"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-null.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fvocab.getty.edu%2Faat%2F300000810"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-no-vocabulary.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fdata.europeana.eu%2Fconcept%2F3401"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-normal-ii.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fdata.europeana.eu%2Fconcept%2FXXXXXXXXX"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(
+ getResourceFileContent("dereference/dereference-unknown-europeana-entity.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fwww.mimo-db.eu%2FInstrumentsKeywords%2F3052"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-normal.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fwww.mimo-db.eu%2FInstrumentsKeywords%2F0"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-null.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(get(urlEqualTo(
+ "/dereference?uri=http%3A%2F%2Fsemantics.gr%2Fauthorities%2Fthematic_tags%2F994210004"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-normal-redirect.xml"))
+ .withStatus(HttpStatus.OK.value())));
+ wireMockServer.stubFor(
+ get(urlEqualTo("/dereference?uri=http%3A%2F%2Fvocab.getty.edu%2Faat%2F400136800"))
+ .withHost(equalTo("dereference-rest.mock"))
+ .willReturn(aResponse()
+ .withHeader("Content-Type", "application/xml")
+ .withBody(getResourceFileContent("dereference/dereference-failure.xml"))
+ .withStatus(HttpStatus.OK.value())));
}
- }
- // Verify enrichment related calls
- private void verifyEnrichmentHappyFlow(boolean doEnrichment, Enricher enricher,
- RDF inputRdf) throws EnrichmentException {
- if (doEnrichment) {
- verify(enricher, times(1)).enrichment(inputRdf);
+ @ParameterizedTest
+ @MethodSource("providedInputRecords")
+ void testEnrichmentWorkerHappyFlow(String inputRecord, RecordStatus recordStatus)
+ throws DereferenceException, EnrichmentException {
+ setDereferenceMocks();
+ setEntityAPIMocks();
+
+ TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
+ modeSetWithBoth.add(Mode.ENRICHMENT);
+ modeSetWithBoth.add(Mode.DEREFERENCE);
+
+ EnricherProvider enricherProvider = new EnricherProvider();
+ enricherProvider.setEnrichmentPropertiesValues(
+ "http://localhost:" + wireMockServer.port() + "/entitymgmt",
+ "http://localhost:" + wireMockServer.port() + "/entity",
+ "api2demo");
+
+ final Enricher enricher = enricherProvider.create();
- } else {
- verify(enricher, never()).enrichment(any());
+ DereferencerProvider dereferencerProvider = new DereferencerProvider();
+ dereferencerProvider.setEnrichmentPropertiesValues("http://entity-api.mock/entitymgmt",
+ "http://entity-api.mock/entity",
+ "api2demo");
+
+ dereferencerProvider.setDereferenceUrl("http://dereference-rest.mock");
+ final Dereferencer dereferencer = dereferencerProvider.create();
+
+ // Execute the worker
+ final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(dereferencer, enricher);
+
+ ProcessedResult<String> output = worker.process(inputRecord, modeSetWithBoth);
+
+ LOGGER.info("REPORT: {}\n\n", output.getReport());
+ LOGGER.info("RECORD: {}\n\n", output.getProcessedRecord());
+ LOGGER.info("STATUS: {}", output.getRecordStatus());
+ assertEquals(recordStatus, output.getRecordStatus());
}
- }
- private void verifyEnrichmentNullFlow(boolean doEnrichment, Enricher worker, RDF inputRdf)
- throws EnrichmentException {
- if (doEnrichment) {
- verify(worker, times(1)).enrichment(inputRdf);
+ @Test
+ void testEnrichmentWorkerHappyFlow() throws DereferenceException, EnrichmentException {
+ TreeSet<Mode> modeSetWithOnlyEnrichment = new TreeSet<>();
+ TreeSet<Mode> modeSetWithOnlyDereference = new TreeSet<>();
+ TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
+
+ modeSetWithOnlyEnrichment.add(Mode.ENRICHMENT);
+ testEnrichmentWorkerHappyFlow(modeSetWithOnlyEnrichment);
+ modeSetWithOnlyDereference.add(Mode.DEREFERENCE);
+ testEnrichmentWorkerHappyFlow(modeSetWithOnlyDereference);
+ modeSetWithBoth.add(Mode.ENRICHMENT);
+ modeSetWithBoth.add(Mode.DEREFERENCE);
+ testEnrichmentWorkerHappyFlow(modeSetWithBoth);
+ }
- } else {
- verify(worker, never()).enrichment(any());
+ @Test
+ void testEnrichmentWorkerNullFlow() throws DereferenceException, EnrichmentException {
+ TreeSet<Mode> modeSetWithOnlyEnrichment = new TreeSet<>();
+ TreeSet<Mode> modeSetWithOnlyDereference = new TreeSet<>();
+ TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
+
+ modeSetWithOnlyEnrichment.add(Mode.ENRICHMENT);
+ testEnrichmentWorkerNullFlow(modeSetWithOnlyEnrichment);
+ modeSetWithOnlyDereference.add(Mode.DEREFERENCE);
+ testEnrichmentWorkerNullFlow(modeSetWithOnlyDereference);
+ modeSetWithBoth.add(Mode.ENRICHMENT);
+ modeSetWithBoth.add(Mode.DEREFERENCE);
+ testEnrichmentWorkerNullFlow(modeSetWithBoth);
}
- }
-
- @Test
- void testProcessWrapperMethods() throws SerializationException {
- // Create enrichment worker and mock the actual worker method as well as the RDF conversion
- // methods.
- final EnrichmentWorkerImpl worker = spy(new EnrichmentWorkerImpl(null, null));
- final RDF inputRdf = new RDF();
- final String outputString = "OutputString";
- doReturn(inputRdf).when(worker).convertStringToRdf(anyString());
- doReturn(outputString).when(worker).convertRdfToString(inputRdf);
-
- doReturn(new ProcessedResult<>(inputRdf)).when(worker).process(any(RDF.class), any());
-
- // Perform the operations and verify the result
- final ProcessedResult<RDF> rdfProcessedResult = worker.process(inputRdf);
- final RDF returnedRdf = rdfProcessedResult.getProcessedRecord();
- assertEquals(inputRdf, returnedRdf);
- assertTrue(rdfProcessedResult.getReport().isEmpty());
- assertEquals(RecordStatus.CONTINUE, rdfProcessedResult.getRecordStatus());
-
- final ProcessedResult<String> stringProcessedResult = worker.process("");
- final String returnedString = stringProcessedResult.getProcessedRecord();
- assertEquals(outputString, returnedString);
- assertTrue(rdfProcessedResult.getReport().isEmpty());
- assertEquals(RecordStatus.CONTINUE, rdfProcessedResult.getRecordStatus());
-
- TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
- modeSetWithBoth.add(Mode.ENRICHMENT);
- modeSetWithBoth.add(Mode.DEREFERENCE);
-
- // Validate the method calls to the actual worker method
- verify(worker, times(2)).process(any(RDF.class), any());
- verify(worker, times(2)).process(inputRdf, modeSetWithBoth);
-
- // Test null string input
- ProcessedResult<String> resultString = worker.process((String) null);
- assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
- for (Report report : resultString.getReport()) {
- assertEquals(Type.ERROR, report.getMessageType());
- assertTrue(report.getMessage().contains("Input RDF string cannot be null."));
- assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+
+ private void testEnrichmentWorkerHappyFlow(Set<Mode> modes)
+ throws DereferenceException, EnrichmentException {
+
+ // Create enricher and mock it.
+ final Enricher enricher = mock(EnricherImpl.class);
+
+ final Dereferencer dereferencer = mock(DereferencerImpl.class);
+
+ // Execute the worker
+ final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(dereferencer, enricher);
+ final RDF inputRdf = new RDF();
+ worker.process(inputRdf, modes);
+
+ // Counters of method calls depend on the mode
+ final boolean doDereferencing = modes.contains(Mode.DEREFERENCE);
+ final boolean doEnrichment = modes.contains(Mode.ENRICHMENT);
+
+ // Check the performed tasks
+ verifyDereferencingHappyFlow(doDereferencing, dereferencer, inputRdf);
+ verifyEnrichmentHappyFlow(doEnrichment, enricher, inputRdf);
+ // verifyMergeHappyFlow(doEnrichment, doDereferencing, entityMergeEngine);
}
- assertEquals(1, resultString.getReport().size());
- assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
- }
-
- @Test
- void testEnrichmentWorkerSerializationException() {
- final EnrichmentWorkerImpl worker = spy(new EnrichmentWorkerImpl(null, null));
-
- final ProcessedResult<String> stringProcessedResult = worker.process(
- "\n"
- + "\n"
- + "\n"
- + " Europe Cultural Heritage\n"
- + " Europeana\n"
- + "\n"
- + "");
- final String returnedString = stringProcessedResult.getProcessedRecord();
- assertEquals(null, returnedString);
- for (Report report : stringProcessedResult.getReport()) {
- assertEquals(Type.ERROR, report.getMessageType());
- assertTrue(report.getMessage().contains("Error serializing rdf"));
- assertTrue(report.getStackTrace().contains("SerializationException: Something went wrong with converting to or from the RDF format."));
+
+ private void testEnrichmentWorkerNullFlow(Set<Mode> modes)
+ throws DereferenceException, EnrichmentException {
+
+ // Create enrichment worker and mock the enrichment and dereferencing results.
+ final Enricher enricher = mock(EnricherImpl.class);
+
+ final Dereferencer dereferencer = mock(DereferencerImpl.class);
+
+ // Execute the worker
+ final EnrichmentWorkerImpl worker =
+ spy(new EnrichmentWorkerImpl(dereferencer, enricher));
+ final RDF inputRdf = new RDF();
+ worker.process(inputRdf, modes);
+
+ // Counters of method calls depend on the mode
+ final boolean doDereferencing = modes.contains(Mode.DEREFERENCE);
+ final boolean doEnrichment = modes.contains(Mode.ENRICHMENT);
+
+ // Check the performed tasks
+ verifyDereferencingNullFlow(doDereferencing, dereferencer, inputRdf);
+ verifyEnrichmentNullFlow(doEnrichment, enricher, inputRdf);
+
+ }
+
+ // Verify dereference related calls
+ private void verifyDereferencingHappyFlow(boolean doDereferencing, Dereferencer dereferencer,
+ RDF inputRdf) throws DereferenceException {
+ if (doDereferencing) {
+ verify(dereferencer, times(1)).dereference(inputRdf);
+
+ } else {
+ verify(dereferencer, never()).dereference(any());
+ }
}
- assertEquals(1, stringProcessedResult.getReport().size());
- assertEquals(RecordStatus.STOP, stringProcessedResult.getRecordStatus());
-
- // Validate the method calls to the actual worker method
- verify(worker, times(1)).process(any(String.class), any());
- }
-
- @Test
- void testEnrichmentWorkerInputNullValues() {
- // Create enrichment worker
- final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(null, null);
-
- TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
- modeSetWithBoth.add(Mode.ENRICHMENT);
- modeSetWithBoth.add(Mode.DEREFERENCE);
-
- // Test null string input
- ProcessedResult<String> resultString = worker.process((String) null, modeSetWithBoth);
- assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
- for (Report report : resultString.getReport()) {
- assertEquals(Type.ERROR, report.getMessageType());
- assertTrue(report.getMessage().contains("Input RDF string cannot be null."));
- assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+
+ private void verifyDereferencingNullFlow(boolean doDereferencing, Dereferencer dereferencer,
+ RDF inputRdf) throws DereferenceException {
+ if (doDereferencing) {
+
+ verify(dereferencer, times(1)).dereference(inputRdf);
+
+ } else {
+ verify(dereferencer, never()).dereference(any());
+ }
+ }
+
+ // Verify enrichment related calls
+ private void verifyEnrichmentHappyFlow(boolean doEnrichment, Enricher enricher,
+ RDF inputRdf) throws EnrichmentException {
+ if (doEnrichment) {
+ verify(enricher, times(1)).enrichment(inputRdf);
+
+ } else {
+ verify(enricher, never()).enrichment(any());
+ }
}
- assertEquals(1, resultString.getReport().size());
- assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
- }
-
- @Test
- void testEnrichmentWorkerModeNullValues() {
- // Create enrichment worker
- final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(null, null);
-
- // Test empty RDF input
- ProcessedResult<RDF> resultRdf = worker.process(new RDF(), null);
- assertEquals(RecordStatus.STOP, resultRdf.getRecordStatus());
- for (Report report : resultRdf.getReport()) {
- assertEquals(Type.ERROR, report.getMessageType());
- assertTrue(report.getMessage().contains("Set of Modes cannot be null"));
- assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+
+ private void verifyEnrichmentNullFlow(boolean doEnrichment, Enricher worker, RDF inputRdf)
+ throws EnrichmentException {
+ if (doEnrichment) {
+ verify(worker, times(1)).enrichment(inputRdf);
+
+ } else {
+ verify(worker, never()).enrichment(any());
+ }
+ }
+
+ @Test
+ void testProcessWrapperMethods() throws SerializationException {
+ // Create enrichment worker and mock the actual worker method as well as the RDF conversion
+ // methods.
+ final EnrichmentWorkerImpl worker = spy(new EnrichmentWorkerImpl(null, null));
+ final RDF inputRdf = new RDF();
+ final String outputString = "OutputString";
+ doReturn(inputRdf).when(worker).convertStringToRdf(anyString());
+ doReturn(outputString).when(worker).convertRdfToString(inputRdf);
+
+ doReturn(new ProcessedResult<>(inputRdf)).when(worker).process(any(RDF.class), any());
+
+ // Perform the operations and verify the result
+ final ProcessedResult<RDF> rdfProcessedResult = worker.process(inputRdf);
+ final RDF returnedRdf = rdfProcessedResult.getProcessedRecord();
+ assertEquals(inputRdf, returnedRdf);
+ assertTrue(rdfProcessedResult.getReport().isEmpty());
+ assertEquals(RecordStatus.CONTINUE, rdfProcessedResult.getRecordStatus());
+
+ final ProcessedResult<String> stringProcessedResult = worker.process("");
+ final String returnedString = stringProcessedResult.getProcessedRecord();
+ assertEquals(outputString, returnedString);
+ assertTrue(rdfProcessedResult.getReport().isEmpty());
+ assertEquals(RecordStatus.CONTINUE, rdfProcessedResult.getRecordStatus());
+
+ TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
+ modeSetWithBoth.add(Mode.ENRICHMENT);
+ modeSetWithBoth.add(Mode.DEREFERENCE);
+
+ // Validate the method calls to the actual worker method
+ verify(worker, times(2)).process(any(RDF.class), any());
+ verify(worker, times(2)).process(inputRdf, modeSetWithBoth);
+
+ // Test null string input
+ ProcessedResult<String> resultString = worker.process((String) null);
+ assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
+ for (Report report : resultString.getReport()) {
+ assertEquals(Type.ERROR, report.getMessageType());
+ assertTrue(report.getMessage().contains("Input RDF string cannot be null."));
+ assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+ }
+ assertEquals(1, resultString.getReport().size());
+ assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
+ }
+
+ @Test
+ void testEnrichmentWorkerSerializationException() {
+ final EnrichmentWorkerImpl worker = spy(new EnrichmentWorkerImpl(null, null));
+
+ final ProcessedResult<String> stringProcessedResult = worker.process(
+ "\n"
+ + "\n"
+ + "\n"
+ + " Europe Cultural Heritage\n"
+ + " Europeana\n"
+ + "\n"
+ + "");
+ final String returnedString = stringProcessedResult.getProcessedRecord();
+ assertEquals(null, returnedString);
+ for (Report report : stringProcessedResult.getReport()) {
+ assertEquals(Type.ERROR, report.getMessageType());
+ assertTrue(report.getMessage().contains("Error serializing rdf"));
+ assertTrue(report.getStackTrace().contains(
+ "SerializationException: Something went wrong with converting to or from the RDF format."));
+ }
+ assertEquals(1, stringProcessedResult.getReport().size());
+ assertEquals(RecordStatus.STOP, stringProcessedResult.getRecordStatus());
+
+ // Validate the method calls to the actual worker method
+ verify(worker, times(1)).process(any(String.class), any());
+ }
+
+ @Test
+ void testEnrichmentWorkerInputNullValues() {
+ // Create enrichment worker
+ final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(null, null);
+
+ TreeSet<Mode> modeSetWithBoth = new TreeSet<>();
+ modeSetWithBoth.add(Mode.ENRICHMENT);
+ modeSetWithBoth.add(Mode.DEREFERENCE);
+
+ // Test null string input
+ ProcessedResult<String> resultString = worker.process((String) null, modeSetWithBoth);
+ assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
+ for (Report report : resultString.getReport()) {
+ assertEquals(Type.ERROR, report.getMessageType());
+ assertTrue(report.getMessage().contains("Input RDF string cannot be null."));
+ assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+ }
+ assertEquals(1, resultString.getReport().size());
+ assertEquals(RecordStatus.STOP, resultString.getRecordStatus());
+ }
+
+ @Test
+ void testEnrichmentWorkerModeNullValues() {
+ // Create enrichment worker
+ final EnrichmentWorkerImpl worker = new EnrichmentWorkerImpl(null, null);
+
+ // Test empty RDF input
+ ProcessedResult<RDF> resultRdf = worker.process(new RDF(), null);
+ assertEquals(RecordStatus.STOP, resultRdf.getRecordStatus());
+ for (Report report : resultRdf.getReport()) {
+ assertEquals(Type.ERROR, report.getMessageType());
+ assertTrue(report.getMessage().contains("Set of Modes cannot be null"));
+ assertTrue(report.getStackTrace().contains("IllegalArgumentException"));
+ }
+ assertEquals(1, resultRdf.getReport().size());
+ assertEquals(RecordStatus.STOP, resultRdf.getRecordStatus());
}
- assertEquals(1, resultRdf.getReport().size());
- assertEquals(RecordStatus.STOP, resultRdf.getRecordStatus());
- }
}
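
The rewritten test drives real Enricher/Dereferencer instances against WireMock running as a forward proxy, which is why stubs can match invented hosts such as dereference-rest.mock. The pattern reduced to its essentials, using only calls that appear in the test above (URL and header values are illustrative):

    WireMockServer server = new WireMockServer(wireMockConfig()
        .dynamicPort()
        .enableBrowserProxying(true)); // proxy mode: requests to arbitrary hosts are intercepted
    server.start();
    JvmProxyConfigurer.configureFor(server); // route the JVM's HTTP traffic through WireMock

    server.stubFor(get(urlEqualTo("/dereference?uri=http%3A%2F%2Fexample.host%2F1"))
        .withHost(equalTo("dereference-rest.mock")) // match on the fake host, not localhost
        .willReturn(aResponse()
            .withHeader("Content-Type", "application/xml")
            .withStatus(HttpStatus.OK.value())));
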
diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java
index 4e3cf17fe1..c98fb91050 100644
--- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java
+++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java
@@ -1,22 +1,5 @@
package eu.europeana.enrichment.rest.client.dereference;
-import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static com.github.tomakehurst.wiremock.client.WireMock.ok;
-import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.anySet;
-import static org.mockito.Mockito.anyString;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.common.ConsoleNotifier;
import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer;
@@ -40,8 +23,25 @@
import eu.europeana.metis.schema.jibx.Concept;
import eu.europeana.metis.schema.jibx.PlaceType;
import eu.europeana.metis.schema.jibx.RDF;
+import org.apache.commons.collections.CollectionUtils;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.mockito.ArgumentCaptor;
+import org.springframework.http.HttpStatus;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.web.client.HttpServerErrorException;
+
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.ServiceUnavailableException;
import java.net.MalformedURLException;
+import java.net.SocketTimeoutException;
import java.net.URL;
+import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -52,363 +52,402 @@
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.collections.CollectionUtils;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.springframework.http.HttpStatus;
-import org.springframework.web.client.HttpClientErrorException;
+import java.util.stream.Stream;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anySet;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
/**
* Unit tests for {@link DereferencerImpl} class
*/
class DereferencerImplTest {
- private static final Map<Class<? extends AboutType>, Set<String>> DEREFERENCE_EXTRACT_RESULT_INVALID = Map.of(
- AboutType.class, Set.of("htt://invalid-example.host/about"),
- Concept.class, Set.of("httpx://invalid-example.host/concept"),
- PlaceType.class, Set.of("http://invalid-example host/place?val=ab"));
-
- private static final Map<Class<? extends AboutType>, Set<String>> DEREFERENCE_EXTRACT_RESULT_VALID = Map.of(
- AboutType.class, Set.of("http://valid-example.host/about"),
- Concept.class, Set.of("http://valid-example.host/concept"),
- PlaceType.class, Set.of("http://valid-example.host/place"));
-
- private static final List<EnrichmentResultList> DEREFERENCE_RESULT;
- private static final Map<SearchTerm, List<EnrichmentBase>> ENRICHMENT_RESULT = new HashMap<>();
-
- private static WireMockServer wireMockServer;
-
- @BeforeAll
- static void createWireMock() {
- wireMockServer = new WireMockServer(wireMockConfig()
- .dynamicPort()
- .enableBrowserProxying(true)
- .notifier(new ConsoleNotifier(true)));
- wireMockServer.start();
- JvmProxyConfigurer.configureFor(wireMockServer);
- }
-
- @AfterAll
- static void tearDownWireMock() {
- wireMockServer.stop();
- }
-
- static {
- final Agent agent1 = new Agent();
- agent1.setAbout("agent1");
- final Agent agent2 = new Agent();
- agent2.setAbout("agent2");
- final Place place1 = new Place();
- place1.setAbout("place1");
- final Place place2 = new Place();
- place2.setAbout("place2");
- final TimeSpan timeSpan1 = new TimeSpan();
- timeSpan1.setAbout("timespan1");
- final TimeSpan timeSpan2 = new TimeSpan();
- timeSpan2.setAbout("timespan2");
- final List<EnrichmentResultBaseWrapper> enrichmentResultBaseWrapperList1 = EnrichmentResultBaseWrapper
- .createEnrichmentResultBaseWrapperList(List.of(Arrays.asList(agent1, null, agent2)), DereferenceResultStatus.SUCCESS);
- final EnrichmentResultList dereferenceResult1 = new EnrichmentResultList(
- enrichmentResultBaseWrapperList1);
- final List<EnrichmentResultBaseWrapper> enrichmentResultBaseWrapperList2 = EnrichmentResultBaseWrapper
- .createEnrichmentResultBaseWrapperList(List.of(Arrays.asList(timeSpan1, timeSpan2, null)),
- DereferenceResultStatus.SUCCESS);
- final EnrichmentResultList dereferenceResult2 = new EnrichmentResultList(
- enrichmentResultBaseWrapperList2);
- DEREFERENCE_RESULT = Arrays.asList(dereferenceResult1, null, dereferenceResult2);
-
- SearchTerm searchTerm1 = new SearchTermImpl("value1", "en", Set.of(EntityType.PLACE));
- SearchTerm searchTerm2 = new SearchTermImpl("value2", "en", Set.of(EntityType.CONCEPT));
- SearchTerm searchTerm3 = new SearchTermImpl("value3", "en", Set.of(EntityType.AGENT));
-
- ENRICHMENT_RESULT.put(searchTerm1, List.of(place1));
- ENRICHMENT_RESULT.put(searchTerm2, null);
- ENRICHMENT_RESULT.put(searchTerm3, List.of(place2));
- }
-
- @Test
- void testDereferencerHappyFlow() throws MalformedURLException {
- // Create mocks of the dependencies
- final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
- doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
- final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
- doReturn(DEREFERENCE_RESULT.get(0),
- DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient)
- .dereference(any());
- final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
-
- final Dereferencer dereferencer = spy(
- new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
- doReturn(DEREFERENCE_EXTRACT_RESULT_VALID).when(dereferencer).extractReferencesForDereferencing(any());
-
- wireMockServer.stubFor(get("/about")
- .withHost(equalTo("valid-example.host"))
- .willReturn(ok("about")));
- wireMockServer.stubFor(get("/concept")
- .withHost(equalTo("valid-example.host"))
- .willReturn(ok("concept")));
- wireMockServer.stubFor(get("/place")
- .withHost(equalTo("valid-example.host"))
- .willReturn(ok("place")));
-
- final RDF inputRdf = new RDF();
- Set<Report> reports = dereferencer.dereference(inputRdf);
-
- verifyDereferenceHappyFlow(dereferenceClient, dereferencer, inputRdf, reports);
- verifyMergeHappyFlow(entityMergeEngine);
- }
-
- @Test
- void testDereferencerNullFlow() {
- // Create mocks of the dependencies
- final ClientEntityResolver entityResolver = mock(ClientEntityResolver.class);
- final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
-
- final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
-
- // Create dereferencer.
- final Dereferencer dereferencer = spy(
- new DereferencerImpl(entityMergeEngine, entityResolver, dereferenceClient));
- doReturn(Collections.emptyMap()).when(dereferencer).extractReferencesForDereferencing(any());
-
- final RDF inputRdf = new RDF();
- dereferencer.dereference(inputRdf);
-
- verifyDereferenceNullFlow(dereferenceClient, dereferencer, inputRdf);
- verifyMergeNullFlow(entityMergeEngine);
- }
-
- @Test
- void testDereferenceInvalidUrl() throws MalformedURLException {
- // Create mocks of the dependencies
- final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
- doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
- final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
- doReturn(DEREFERENCE_RESULT.get(0),
- DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient)
- .dereference(any());
- final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
-
- final Dereferencer dereferencer = spy(
- new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
- doReturn(DEREFERENCE_EXTRACT_RESULT_INVALID).when(dereferencer).extractReferencesForDereferencing(any());
-
- final RDF inputRdf = new RDF();
- Set<Report> reports = dereferencer.dereference(inputRdf);
-
- verifyDereferenceInvalidUrlFlow(dereferenceClient, dereferencer, inputRdf, reports);
- verifyMergeExceptionFlow(entityMergeEngine);
- }
-
- @Test
- void testDereferenceHttpException() throws MalformedURLException {
- // Create mocks of the dependencies
- final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
- doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
- final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
- when(dereferenceClient.dereference(any()))
- .thenThrow(HttpClientErrorException.create(HttpStatus.BAD_REQUEST, "", null, null, null));
- final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
-
- final Dereferencer dereferencer = spy(
- new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
- doReturn(DEREFERENCE_EXTRACT_RESULT_VALID).when(dereferencer).extractReferencesForDereferencing(any());
-
- final RDF inputRdf = new RDF();
- Set<Report> reports = dereferencer.dereference(inputRdf);
-
- verifyDereferenceExceptionFlow(dereferenceClient, dereferencer, inputRdf, reports);
- verifyMergeExceptionFlow(entityMergeEngine);
- }
-
- private void verifyDereferenceHappyFlow(DereferenceClient dereferenceClient,
- Dereferencer dereferencer, RDF inputRdf, Set<Report> reports) {
-
- verifyDerefencer(dereferencer, inputRdf);
-
- // Actually dereferencing.
- verify(dereferenceClient, times(DEREFERENCE_EXTRACT_RESULT_VALID.size())).dereference(anyString());
-
- assertEquals(1, reports.size());
- for (Report report : reports) {
- assertTrue(report.getMessage().contains("Dereferencing or Coreferencing: the europeana entity does not exist"));
- assertEquals(Type.WARN, report.getMessageType());
- assertEquals(Mode.DEREFERENCE, report.getMode());
- assertEquals("http://valid-example.host/concept", report.getValue());
- assertEquals("", report.getStackTrace());
+ private static final Map<Class<? extends AboutType>, Set<String>> DEREFERENCE_EXTRACT_RESULT_INVALID = Map.of(
+ AboutType.class, Set.of("htt://invalid-example.host/about"),
+ Concept.class, Set.of("httpx://invalid-example.host/concept"),
+ PlaceType.class, Set.of("http://invalid-example host/place?val=ab"));
+
+ private static final Map<Class<? extends AboutType>, Set<String>> DEREFERENCE_EXTRACT_RESULT_VALID = Map.of(
+ AboutType.class, Set.of("http://valid-example.host/about"),
+ Concept.class, Set.of("http://data.europeana.eu.host/concept"),
+ PlaceType.class, Set.of("http://valid-example.host/place"));
+
+ private static final List<EnrichmentResultList> DEREFERENCE_RESULT;
+ private static final Map<SearchTerm, List<EnrichmentBase>> ENRICHMENT_RESULT = new HashMap<>();
+
+ private static WireMockServer wireMockServer;
+
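+ // Shared fixtures: DEREFERENCE_RESULT deliberately contains a null element and ENRICHMENT_RESULT maps one search term to null.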
+ static {
+ final Agent agent1 = new Agent();
+ agent1.setAbout("agent1");
+ final Agent agent2 = new Agent();
+ agent2.setAbout("agent2");
+ final Place place1 = new Place();
+ place1.setAbout("place1");
+ final Place place2 = new Place();
+ place2.setAbout("place2");
+ final TimeSpan timeSpan1 = new TimeSpan();
+ timeSpan1.setAbout("timespan1");
+ final TimeSpan timeSpan2 = new TimeSpan();
+ timeSpan2.setAbout("timespan2");
+ final List<EnrichmentResultBaseWrapper> enrichmentResultBaseWrapperList1 = EnrichmentResultBaseWrapper
+ .createEnrichmentResultBaseWrapperList(List.of(Arrays.asList(agent1, null, agent2)), DereferenceResultStatus.SUCCESS);
+ final EnrichmentResultList dereferenceResult1 = new EnrichmentResultList(
+ enrichmentResultBaseWrapperList1);
+ final List<EnrichmentResultBaseWrapper> enrichmentResultBaseWrapperList2 = EnrichmentResultBaseWrapper
+ .createEnrichmentResultBaseWrapperList(List.of(Arrays.asList(timeSpan1, timeSpan2, null)),
+ DereferenceResultStatus.SUCCESS);
+ final EnrichmentResultList dereferenceResult2 = new EnrichmentResultList(
+ enrichmentResultBaseWrapperList2);
+ DEREFERENCE_RESULT = Arrays.asList(dereferenceResult1, null, dereferenceResult2);
+
+ SearchTerm searchTerm1 = new SearchTermImpl("value1", "en", Set.of(EntityType.PLACE));
+ SearchTerm searchTerm2 = new SearchTermImpl("value2", "en", Set.of(EntityType.CONCEPT));
+ SearchTerm searchTerm3 = new SearchTermImpl("value3", "en", Set.of(EntityType.AGENT));
+
+ ENRICHMENT_RESULT.put(searchTerm1, List.of(place1));
+ ENRICHMENT_RESULT.put(searchTerm2, null);
+ ENRICHMENT_RESULT.put(searchTerm3, List.of(place2));
+ }
+
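+ // Run WireMock as a JVM-wide proxy so the stubbed example hosts resolve without real network access.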
+ @BeforeAll
+ static void createWireMock() {
+ wireMockServer = new WireMockServer(wireMockConfig()
+ .dynamicPort()
+ .enableBrowserProxying(true)
+ .notifier(new ConsoleNotifier(true)));
+ wireMockServer.start();
+ JvmProxyConfigurer.configureFor(wireMockServer);
+ }
+
+ @AfterAll
+ static void tearDownWireMock() {
+ wireMockServer.stop();
+ }
+
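+ // Failure scenarios for the parameterized network-exception test, paired with the expected report type and message.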
+ private static Stream<Arguments> providedExceptions() {
+ return Stream.of(
+ Arguments.of(HttpClientErrorException.create(HttpStatus.BAD_REQUEST, "", null, null, null), Type.WARN, "400 Bad Request"),
+ Arguments.of(HttpServerErrorException.create(HttpStatus.INTERNAL_SERVER_ERROR, "Error with service", null, null, null), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing."),
+ Arguments.of(new RuntimeException(new UnknownHostException("")), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing."),
+ Arguments.of(new RuntimeException(new SocketTimeoutException("Time out exceeded")), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing."),
+ Arguments.of(new RuntimeException(new ServiceUnavailableException("No service")), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing."),
+ Arguments.of(new NotFoundException(), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing."),
+ Arguments.of(new RuntimeException(new IllegalArgumentException("argument invalid")), Type.ERROR,
+ "Exception occurred while trying to perform dereferencing.")
+ );
+ }
+
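+ // Happy flow: the stubbed client returns a null result for one reference, which the test expects to surface as reports for the concept.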
+ @Test
+ void testDereferencerHappyFlow() throws MalformedURLException {
+ // Create mocks of the dependencies
+ final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
+ doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
+ final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
+ doReturn(DEREFERENCE_RESULT.get(0),
+ DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient)
+ .dereference(any());
+ final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
+
+ final Dereferencer dereferencer = spy(
+ new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
+ doReturn(DEREFERENCE_EXTRACT_RESULT_VALID).when(dereferencer).extractReferencesForDereferencing(any());
+
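+ // Stub the three hosts listed in DEREFERENCE_EXTRACT_RESULT_VALID.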
+ wireMockServer.stubFor(get("/about")
+ .withHost(equalTo("valid-example.host"))
+ .willReturn(ok("about")));
+ wireMockServer.stubFor(get("/concept")
+ .withHost(equalTo("data.europeana.eu.host"))
+ .willReturn(ok("concept")));
+ wireMockServer.stubFor(get("/place")
+ .withHost(equalTo("valid-example.host"))
+ .willReturn(ok("place")));
+
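+ // An empty record suffices here: reference extraction is stubbed on the spy.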
+ final RDF inputRdf = new RDF();
+ Set<Report> reports = dereferencer.dereference(inputRdf);
+
+ verifyDereferenceHappyFlow(dereferenceClient, dereferencer, inputRdf, reports);
+ verifyMergeHappyFlow(entityMergeEngine);
+ }
+
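+ // Null flow: no references are extracted, so the dereference client must never be called.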
+ @Test
+ void testDereferencerNullFlow() {
+ // Create mocks of the dependencies
+ final ClientEntityResolver entityResolver = mock(ClientEntityResolver.class);
+ final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
+
+ final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
+
+ // Create dereferencer.
+ final Dereferencer dereferencer = spy(
+ new DereferencerImpl(entityMergeEngine, entityResolver, dereferenceClient));
+ doReturn(Collections.emptyMap()).when(dereferencer).extractReferencesForDereferencing(any());
+
+ final RDF inputRdf = new RDF();
+ dereferencer.dereference(inputRdf);
+
+ verifyDereferenceNullFlow(dereferenceClient, dereferencer, inputRdf);
+ verifyMergeNullFlow(entityMergeEngine);
+ }
+
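+ // Invalid URL flow: malformed references are reported with Type.IGNORE and never reach the client.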
+ @Test
+ void testDereferenceInvalidUrl() throws MalformedURLException {
+ // Create mocks of the dependencies
+ final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
+ doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
+ final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
+ doReturn(DEREFERENCE_RESULT.get(0),
+ DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient)
+ .dereference(any());
+ final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
+
+ final Dereferencer dereferencer = spy(
+ new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
+ doReturn(DEREFERENCE_EXTRACT_RESULT_INVALID).when(dereferencer).extractReferencesForDereferencing(any());
+
+ final RDF inputRdf = new RDF();
+ Set<Report> reports = dereferencer.dereference(inputRdf);
+
+ verifyDereferenceInvalidUrlFlow(dereferenceClient, dereferencer, inputRdf, reports);
+ verifyMergeExceptionFlow(entityMergeEngine);
}
- Set setOfValues = DEREFERENCE_EXTRACT_RESULT_VALID.values().stream().flatMap(Collection::stream)
- .collect(Collectors.toSet());
- for (String dereferenceUrl : setOfValues) {
- verify(dereferenceClient, times(1)).dereference(dereferenceUrl);
+
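+ // Each exception thrown by the dereference client should surface in the reports with the expected type and message.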
+ @Disabled("TODO: MET-4255 Improve execution time, think feasibility of @Value(\"${max-retries}\")")
+ @ParameterizedTest
+ @MethodSource("providedExceptions")
+ void testDereferenceNetworkException(Exception ex, Type expectedMessageType, String expectedMessage) throws MalformedURLException {
+ // Create mocks of the dependencies
+ final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class);
+ doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet());
+ final DereferenceClient dereferenceClient = mock(DereferenceClient.class);
+ doThrow(ex).when(dereferenceClient).dereference(any());
+ final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class);
+
+ final Dereferencer dereferencer = spy(
+ new DereferencerImpl(entityMergeEngine, clientEntityResolver, dereferenceClient));
+ doReturn(DEREFERENCE_EXTRACT_RESULT_VALID).when(dereferencer).extractReferencesForDereferencing(any());
+
+ final RDF inputRdf = new RDF();
+ Set<Report> reports = dereferencer.dereference(inputRdf);
+
+ verifyDereferenceExceptionFlow(dereferenceClient, dereferencer, inputRdf, reports, expectedMessageType, expectedMessage);
+ verifyMergeExceptionFlow(entityMergeEngine);
}
- }
- private void verifyDereferenceInvalidUrlFlow(DereferenceClient dereferenceClient,
- Dereferencer dereferencer, RDF inputRdf, Set reports) {
+
+ private void verifyDereferenceHappyFlow(DereferenceClient dereferenceClient,
+ Dereferencer dereferencer, RDF inputRdf, Set<Report> reports) {
+
+ verifyDereferencer(dereferencer, inputRdf);
+
+ // Actually dereferencing.
+ verify(dereferenceClient, times(DEREFERENCE_EXTRACT_RESULT_VALID.size())).dereference(anyString());
+
+ assertEquals(2, reports.size());
+
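+ // The europeana concept could not be dereferenced: one error plus one warning are expected for the same value.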
+ Set<Report> expectedReports = Set.of(Report.buildDereferenceError()
+ .withValue("http://data.europeana.eu.host/concept")
+ .withMessage("Dereference or Coreferencing failed."),
+ Report.buildDereferenceWarn()
+ .withStatus(HttpStatus.OK)
+ .withValue("http://data.europeana.eu.host/concept")
+ .withMessage("Dereferencing or Coreferencing: the europeana entity does not exist."));
- // Extracting values for dereferencing
- verifyDerefencer(dereferencer, inputRdf);
+ assertTrue(CollectionUtils.isEqualCollection(expectedReports, reports));
- // Checking the report.
- assertEquals(3, reports.size());
- for (Report report : reports) {
- assertEquals(Type.IGNORE, report.getMessageType());
- assertEquals(Mode.DEREFERENCE, report.getMode());
+ Set<String> setOfValues = DEREFERENCE_EXTRACT_RESULT_VALID.values().stream().flatMap(Collection::stream)
+ .collect(Collectors.toSet());
+ for (String dereferenceUrl : setOfValues) {
+ verify(dereferenceClient, times(1)).dereference(dereferenceUrl);
+ }
}
- Set setOfValues = DEREFERENCE_EXTRACT_RESULT_VALID.values().stream().flatMap(Collection::stream)
- .collect(Collectors.toSet());
- for (String dereferenceUrl : setOfValues) {
- verify(dereferenceClient, times(0)).dereference(dereferenceUrl);
+
+ private void verifyDereferenceInvalidUrlFlow(DereferenceClient dereferenceClient,
+ Dereferencer dereferencer, RDF inputRdf, Set<Report> reports) {
+
+ // Extracting values for dereferencing
+ verifyDereferencer(dereferencer, inputRdf);
+
+ // Checking the report.
+ assertEquals(3, reports.size());
+ for (Report report : reports) {
+ assertEquals(Type.IGNORE, report.getMessageType());
+ assertEquals(Mode.DEREFERENCE, report.getMode());
+ }
+ Set<String> setOfValues = DEREFERENCE_EXTRACT_RESULT_VALID.values().stream().flatMap(Collection::stream)
+ .collect(Collectors.toSet());
+ for (String dereferenceUrl : setOfValues) {
+ verify(dereferenceClient, times(0)).dereference(dereferenceUrl);
+ }
}
- }
- private void verifyDereferenceExceptionFlow(DereferenceClient dereferenceClient,
- Dereferencer dereferencer, RDF inputRdf, Set<Report> reports) {
+
+ private void verifyDereferenceExceptionFlow(DereferenceClient dereferenceClient,
+ Dereferencer dereferencer, RDF inputRdf,
+ Set<Report> reports, Type expectedType,
+ String expectedMessage) {
- // Extracting values for dereferencing
- verifyDerefencer(dereferencer, inputRdf);
+ // Extracting values for dereferencing
+ verifyDereferencer(dereferencer, inputRdf);
- // Actually dereferencing.
- verify(dereferenceClient, times(DEREFERENCE_EXTRACT_RESULT_VALID.size())).dereference(anyString());
+ // Actually dereferencing.
+ verify(dereferenceClient, atLeast(DEREFERENCE_EXTRACT_RESULT_VALID.size())).dereference(anyString());
- // Checking the report.
- assertEquals(3, reports.size());
- for (Report report : reports) {
- assertTrue(report.getMessage().contains("400 Bad Request"));
- assertEquals(Type.WARN, report.getMessageType());
+ // Checking the report.
+ assertEquals(3, reports.size());
+ for (Report report : reports) {
+ assertTrue(report.getMessage().contains(expectedMessage));
+ assertEquals(expectedType, report.getMessageType());
+ }
}
- }
-
- // Verify merge calls
- private void verifyMergeHappyFlow(EntityMergeEngine entityMergeEngine) throws MalformedURLException {
- ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass(List.class);
- List<DereferencedEntities> expectedList = prepareExpectedList();
- verify(entityMergeEngine, times(1))
- .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
- assertEquals(expectedList.size(), argumentCaptor.getValue().size());
- for (int i = 0; i < expectedList.size(); i++) {
- DereferencedEntities expectedElement = expectedList.get(i);
- DereferencedEntities capturedElement = argumentCaptor.getValue().get(i);
- assertEquals(expectedElement.getClassType(), capturedElement.getClassType());
- assertEquals(expectedElement.getReportMessages().size(), capturedElement.getReportMessages().size());
- assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReportMessages(), capturedElement.getReportMessages()));
- assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReferenceTermListMap().keySet(),
- capturedElement.getReferenceTermListMap().keySet()));
- assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReferenceTermListMap().values(),
- capturedElement.getReferenceTermListMap().values()));
+
+ // Verify merge calls
+ private void verifyMergeHappyFlow(EntityMergeEngine entityMergeEngine) throws MalformedURLException {
+ ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass(List.class);
+ List<DereferencedEntities> expectedList = prepareExpectedList();
+ verify(entityMergeEngine, times(1))
+ .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
+ assertEquals(expectedList.size(), argumentCaptor.getValue().size());
+ for (int i = 0; i < expectedList.size(); i++) {
+ DereferencedEntities expectedElement = expectedList.get(i);
+ DereferencedEntities capturedElement = argumentCaptor.getValue().get(i);
+ assertEquals(expectedElement.getClassType(), capturedElement.getClassType());
+ assertEquals(expectedElement.getReportMessages().size(), capturedElement.getReportMessages().size());
+ assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReportMessages(), capturedElement.getReportMessages()));
+ assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReferenceTermListMap().keySet(),
+ capturedElement.getReferenceTermListMap().keySet()));
+ assertTrue(CollectionUtils.isEqualCollection(expectedElement.getReferenceTermListMap().values(),
+ capturedElement.getReferenceTermListMap().values()));
+ }
}
- }
- private void verifyDereferenceNullFlow(DereferenceClient dereferenceClient,
- Dereferencer dereferencer, RDF inputRdf) {
+
+ private void verifyDereferenceNullFlow(DereferenceClient dereferenceClient,
+ Dereferencer dereferencer, RDF inputRdf) {
- // Extracting values for dereferencing
- verifyDerefencer(dereferencer, inputRdf);
+ // Extracting values for dereferencing
+ verifyDereferencer(dereferencer, inputRdf);
- // Actually dereferencing: don't use the null values.
- final Set<String> dereferenceUrls = Arrays.stream(new String[0]).filter(Objects::nonNull)
- .collect(Collectors.toSet());
- verify(dereferenceClient, times(dereferenceUrls.size())).dereference(anyString());
- for (String dereferenceUrl : dereferenceUrls) {
- verify(dereferenceClient, times(1)).dereference(dereferenceUrl);
+ // Actually dereferencing: don't use the null values.
+ final Set<String> dereferenceUrls = Arrays.stream(new String[0]).filter(Objects::nonNull)
+ .collect(Collectors.toSet());
+ verify(dereferenceClient, times(dereferenceUrls.size())).dereference(anyString());
+ for (String dereferenceUrl : dereferenceUrls) {
+ verify(dereferenceClient, times(1)).dereference(dereferenceUrl);
+ }
}
- }
-
- private void verifyDerefencer(Dereferencer dereferencer, RDF inputRdf) {
- // Extracting values for dereferencing
- verify(dereferencer, times(1)).extractReferencesForDereferencing(any());
- verify(dereferencer, times(1)).extractReferencesForDereferencing(inputRdf);
- }
-
- private void verifyMergeExceptionFlow(EntityMergeEngine entityMergeEngine) throws MalformedURLException {
- ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass((Class) List.class);
- List<DereferencedEntities> expectedList = prepareExpectedListMergeNull();
- verify(entityMergeEngine, times(1))
- .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
- assertEquals(expectedList.size(), argumentCaptor.getValue().size());
- for (int i = 0; i < expectedList.size(); i++) {
- DereferencedEntities expectedElement = expectedList.get(i);
- DereferencedEntities capturedElement = argumentCaptor.getValue().get(i);
- assertEquals(expectedElement.getClassType(), capturedElement.getClassType());
- assertEquals(expectedElement.getReportMessages().size(), capturedElement.getReportMessages().size());
+
+ private void verifyDereferencer(Dereferencer dereferencer, RDF inputRdf) {
+ // Extracting values for dereferencing
+ verify(dereferencer, times(1)).extractReferencesForDereferencing(any());
+ verify(dereferencer, times(1)).extractReferencesForDereferencing(inputRdf);
}
- }
-
- private void verifyMergeNullFlow(EntityMergeEngine entityMergeEngine) {
- ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass((Class) List.class);
- verify(entityMergeEngine, times(1))
- .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
- assertEquals(1, argumentCaptor.getValue().size());
- assertNull(argumentCaptor.getValue().get(0).getClassType());
- assertTrue(argumentCaptor.getValue().get(0).getReferenceTermListMap().isEmpty());
- assertTrue(argumentCaptor.getValue().get(0).getReportMessages().isEmpty());
- }
-
- private List<DereferencedEntities> prepareExpectedList() throws MalformedURLException {
- ReferenceTermImpl expectedReferenceTerm1 = new ReferenceTermImpl(new URL("http://valid-example.host/concept"));
- Set<Report> expectedReports1 = Set.of(Report.buildDereferenceWarn().withStatus(HttpStatus.OK)
- .withValue("http://valid-example.host/concept")
- .withMessage(
- "Dereferencing or Coreferencing: the europeana entity does not exist."));
- DereferencedEntities expectedDereferencedEntities1 = new DereferencedEntities(
- Map.of(expectedReferenceTerm1, new ArrayList<>()),
- expectedReports1, Concept.class);
-
- ReferenceTermImpl expectedReferenceTerm2 = new ReferenceTermImpl(new URL("http://valid-example.host/place"));
- List<EnrichmentBase> expectedEnrichmentBaseList2 = new ArrayList<>();
- expectedEnrichmentBaseList2.add(
- DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0));
- expectedEnrichmentBaseList2.add(
- DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(1));
- expectedEnrichmentBaseList2.add(null);
- DereferencedEntities expectedDereferencedEntities2 = new DereferencedEntities(
- Map.of(expectedReferenceTerm2, expectedEnrichmentBaseList2),
- Collections.emptySet(), PlaceType.class);
-
- ReferenceTermImpl expectedReferenceTerm3 = new ReferenceTermImpl(new URL("http://valid-example.host/about"));
- List<EnrichmentBase> expectedEnrichmentBaseList3 = new ArrayList<>();
- expectedEnrichmentBaseList3.add(
- DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0));
- expectedEnrichmentBaseList3.add(null);
- expectedEnrichmentBaseList3.add(
- DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(2));
- DereferencedEntities expectedDereferencedEntities3 = new DereferencedEntities(
- Map.of(expectedReferenceTerm3, expectedEnrichmentBaseList3),
- Collections.emptySet(), AboutType.class);
-
- return List.of(expectedDereferencedEntities3, expectedDereferencedEntities1, expectedDereferencedEntities2);
- }
-
- private List<DereferencedEntities> prepareExpectedListMergeNull() throws MalformedURLException {
- Report expectedReportConcept = Report.buildDereferenceWarn()
- .withStatus(HttpStatus.BAD_REQUEST)
- .withValue("http://valid-example.host/concept")
- .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
- Report expectedReportPlace = Report.buildDereferenceWarn()
- .withStatus(HttpStatus.BAD_REQUEST)
- .withValue("http://valid-example.host/place")
- .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
- Report expectedReportAbout = Report.buildDereferenceWarn()
- .withStatus(HttpStatus.BAD_REQUEST)
- .withValue("http://valid-example.host/about")
- .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
-
- ReferenceTermImpl referenceTerm1 = new ReferenceTermImpl(new URL("http://valid-example.host/about"));
- ReferenceTermImpl referenceTerm2 = new ReferenceTermImpl(new URL("http://valid-example.host/concept"));
- ReferenceTermImpl referenceTerm3 = new ReferenceTermImpl(new URL("http://valid-example.host/place"));
-
- DereferencedEntities dereferencedEntities1 = new DereferencedEntities(Map.of(referenceTerm1, Collections.emptyList()),
- Set.of(expectedReportAbout), AboutType.class);
- DereferencedEntities dereferencedEntities2 = new DereferencedEntities(Map.of(referenceTerm2, Collections.emptyList()),
- Set.of(expectedReportConcept), Concept.class);
- DereferencedEntities dereferencedEntities3 = new DereferencedEntities(Map.of(referenceTerm3, Collections.emptyList()),
- Set.of(expectedReportPlace), PlaceType.class);
-
- return List.of(dereferencedEntities1, dereferencedEntities2, dereferencedEntities3);
- }
+
+ private void verifyMergeExceptionFlow(EntityMergeEngine entityMergeEngine) throws MalformedURLException {
+ ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass((Class) List.class);
+ List<DereferencedEntities> expectedList = prepareExpectedListMergeNull();
+ verify(entityMergeEngine, times(1))
+ .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
+ assertEquals(expectedList.size(), argumentCaptor.getValue().size());
+ for (int i = 0; i < expectedList.size(); i++) {
+ DereferencedEntities expectedElement = expectedList.get(i);
+ DereferencedEntities capturedElement = argumentCaptor.getValue().get(i);
+ assertEquals(expectedElement.getClassType(), capturedElement.getClassType());
+ assertEquals(expectedElement.getReportMessages().size(), capturedElement.getReportMessages().size());
+ }
+ }
+
+ private void verifyMergeNullFlow(EntityMergeEngine entityMergeEngine) {
+ ArgumentCaptor<List<DereferencedEntities>> argumentCaptor = ArgumentCaptor.forClass((Class) List.class);
+ verify(entityMergeEngine, times(1))
+ .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture());
+ assertEquals(1, argumentCaptor.getValue().size());
+ assertNull(argumentCaptor.getValue().get(0).getClassType());
+ assertTrue(argumentCaptor.getValue().get(0).getReferenceTermListMap().isEmpty());
+ assertTrue(argumentCaptor.getValue().get(0).getReportMessages().isEmpty());
+ }
+
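+ // Expected merge input for the happy flow: reports but no entities for the concept, dereferenced entities for place and about.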
+ private List<DereferencedEntities> prepareExpectedList() throws MalformedURLException {
+ ReferenceTermImpl expectedReferenceTerm1 = new ReferenceTermImpl(new URL("http://data.europeana.eu.host/concept"));
+ Set<Report> expectedReports1 = Set.of(Report.buildDereferenceError()
+ .withValue("http://data.europeana.eu.host/concept")
+ .withMessage("Dereference or Coreferencing failed."),
+ Report.buildDereferenceWarn()
+ .withStatus(HttpStatus.OK)
+ .withValue("http://data.europeana.eu.host/concept")
+ .withMessage("Dereferencing or Coreferencing: the europeana entity does not exist."));
+ DereferencedEntities expectedDereferencedEntities1 = new DereferencedEntities(
+ Map.of(expectedReferenceTerm1, new ArrayList<>()),
+ expectedReports1, Concept.class);
+
+ ReferenceTermImpl expectedReferenceTerm2 = new ReferenceTermImpl(new URL("http://valid-example.host/place"));
+ List<EnrichmentBase> expectedEnrichmentBaseList2 = new ArrayList<>();
+ expectedEnrichmentBaseList2.add(
+ DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0));
+ expectedEnrichmentBaseList2.add(
+ DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(1));
+ expectedEnrichmentBaseList2.add(null);
+ DereferencedEntities expectedDereferencedEntities2 = new DereferencedEntities(
+ Map.of(expectedReferenceTerm2, expectedEnrichmentBaseList2),
+ Collections.emptySet(), PlaceType.class);
+
+ ReferenceTermImpl expectedReferenceTerm3 = new ReferenceTermImpl(new URL("http://valid-example.host/about"));
+ List<EnrichmentBase> expectedEnrichmentBaseList3 = new ArrayList<>();
+ expectedEnrichmentBaseList3.add(
+ DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0));
+ expectedEnrichmentBaseList3.add(null);
+ expectedEnrichmentBaseList3.add(
+ DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(2));
+ DereferencedEntities expectedDereferencedEntities3 = new DereferencedEntities(
+ Map.of(expectedReferenceTerm3, expectedEnrichmentBaseList3),
+ Collections.emptySet(), AboutType.class);
+
+ return List.of(expectedDereferencedEntities3, expectedDereferencedEntities1, expectedDereferencedEntities2);
+ }
+
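+ // Expected merge input when every dereference call fails: empty entity lists with one 400 Bad Request warning per reference.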
+ private List<DereferencedEntities> prepareExpectedListMergeNull() throws MalformedURLException {
+ Report expectedReportConcept = Report.buildDereferenceWarn()
+ .withStatus(HttpStatus.BAD_REQUEST)
+ .withValue("http://valid-example.host/concept")
+ .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
+ Report expectedReportPlace = Report.buildDereferenceWarn()
+ .withStatus(HttpStatus.BAD_REQUEST)
+ .withValue("http://valid-example.host/place")
+ .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
+ Report expectedReportAbout = Report.buildDereferenceWarn()
+ .withStatus(HttpStatus.BAD_REQUEST)
+ .withValue("http://valid-example.host/about")
+ .withMessage("HttpClientErrorException.BadRequest: 400 Bad Request");
+
+ ReferenceTermImpl referenceTerm1 = new ReferenceTermImpl(new URL("http://valid-example.host/about"));
+ ReferenceTermImpl referenceTerm2 = new ReferenceTermImpl(new URL("http://valid-example.host/concept"));
+ ReferenceTermImpl referenceTerm3 = new ReferenceTermImpl(new URL("http://valid-example.host/place"));
+
+ DereferencedEntities dereferencedEntities1 = new DereferencedEntities(Map.of(referenceTerm1, Collections.emptyList()),
+ Set.of(expectedReportAbout), AboutType.class);
+ DereferencedEntities dereferencedEntities2 = new DereferencedEntities(Map.of(referenceTerm2, Collections.emptyList()),
+ Set.of(expectedReportConcept), Concept.class);
+ DereferencedEntities dereferencedEntities3 = new DereferencedEntities(Map.of(referenceTerm3, Collections.emptyList()),
+ Set.of(expectedReportPlace), PlaceType.class);
+
+ return List.of(dereferencedEntities1, dereferencedEntities2, dereferencedEntities3);
+ }
}
diff --git a/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-entity.xml b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-failure.xml
similarity index 91%
rename from metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-entity.xml
rename to metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-failure.xml
index 927a79c294..116a2715da 100644
--- a/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-entity.xml
+++ b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-failure.xml
@@ -1,6 +1,6 @@
- UNKNOWN_ENTITY
+ FAILURE
diff --git a/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-vocabulary.xml b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-vocabulary.xml
new file mode 100644
index 0000000000..3ebf33d493
--- /dev/null
+++ b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-no-vocabulary.xml
@@ -0,0 +1,6 @@
+
+
+
+ NO_VOCABULARY_MATCHING
+
+
diff --git a/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-normal-ii.xml b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-normal-ii.xml
new file mode 100644
index 0000000000..6917a69138
--- /dev/null
+++ b/metis-enrichment/metis-enrichment-client/src/test/resources/dereference/dereference-normal-ii.xml
@@ -0,0 +1,83 @@
+
+
+
+
+