diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4fef9f628f..73b182aee2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,16 +29,6 @@ jobs: secrets: DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} - docker-build_metis-core: - needs: ci - uses: europeana/metis-actions/.github/workflows/docker-build-push.yml@main - with: - docker-organization: europeana - docker-image-name: metis-core - project-path: metis-core/metis-core-rest/ - secrets: - DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} - DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} docker-build_metis-dereference: needs: ci uses: europeana/metis-actions/.github/workflows/docker-build-push.yml@main diff --git a/.run/metis-core-rest_Dockerfile.run.xml b/.run/metis-core-rest_Dockerfile.run.xml deleted file mode 100644 index 000659c0d2..0000000000 --- a/.run/metis-core-rest_Dockerfile.run.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/.run/metis-core.run.xml b/.run/metis-core.run.xml deleted file mode 100644 index 9a958f998d..0000000000 --- a/.run/metis-core.run.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - \ No newline at end of file diff --git a/.run/metis-debias.run.xml b/.run/metis-debias.run.xml new file mode 100644 index 0000000000..b91a56d24d --- /dev/null +++ b/.run/metis-debias.run.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/metis-authentication/metis-authentication-common/pom.xml b/metis-authentication/metis-authentication-common/pom.xml index ce4c0e6478..68c37759e2 100644 --- a/metis-authentication/metis-authentication-common/pom.xml +++ b/metis-authentication/metis-authentication-common/pom.xml @@ -4,7 +4,7 @@ metis-authentication eu.europeana.metis - 12.2 + 13 metis-authentication-common diff --git 
a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/AccountRole.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/AccountRole.java index e32035e86d..3d6b1ad098 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/AccountRole.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/AccountRole.java @@ -5,9 +5,6 @@ /** * The Role of an account. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-31 */ public enum AccountRole { METIS_ADMIN, EUROPEANA_DATA_OFFICER, PROVIDER_VIEWER; diff --git a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/Credentials.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/Credentials.java index 12d79f1eff..aa247164bd 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/Credentials.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/Credentials.java @@ -3,8 +3,6 @@ /** * Contains the email and password of a user temporarily. *

Used mostly after decoding the Authorization Header in an HTTP request.

- * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-14 */ public class Credentials { diff --git a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/EmailParameter.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/EmailParameter.java index 1fc748a6c5..0e7f3c91c5 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/EmailParameter.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/EmailParameter.java @@ -4,9 +4,6 @@ * Contains the email parameter. *

This class is used for passing parameters as json to a http request body. It contains the * email parameter

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-02-07 */ public class EmailParameter { diff --git a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUser.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUser.java index e765b92396..ffb10398b8 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUser.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUser.java @@ -16,9 +16,6 @@ /** * The Metis user containing all parameters. *

This class is used as a JPA class to the postgresql database

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-27 */ @Entity @Table(name = "metis_users") diff --git a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUserAccessToken.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUserAccessToken.java index 1cbe44458a..dde987d376 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUserAccessToken.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/MetisUserAccessToken.java @@ -11,9 +11,6 @@ /** * The token of a user, which related to an email of that user. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-30 */ @Entity @Table(name = "metis_user_access_tokens") diff --git a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/OldNewPasswordParameters.java b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/OldNewPasswordParameters.java index cc84c20a77..c4b61b7a4b 100644 --- a/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/OldNewPasswordParameters.java +++ b/metis-authentication/metis-authentication-common/src/main/java/eu/europeana/metis/authentication/user/OldNewPasswordParameters.java @@ -4,9 +4,6 @@ * Contains the old and new password parameters. *

This class is used for passing parameters as json to a http request body. It contains the old * password to be changed with the new provided password

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-02-07 */ public class OldNewPasswordParameters { diff --git a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserAccessTokenTest.java b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserAccessTokenTest.java index 98bff9b2aa..888c7b69f8 100644 --- a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserAccessTokenTest.java +++ b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserAccessTokenTest.java @@ -6,10 +6,6 @@ import java.util.Date; import org.junit.jupiter.api.Test; -/** - * @author Simon Tzanakis - * @since 2020-09-11 - */ class MetisUserAccessTokenTest { @Test diff --git a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserTest.java b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserTest.java index 1a46209c1d..a59e233b1d 100644 --- a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserTest.java +++ b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserTest.java @@ -8,10 +8,6 @@ import java.util.Date; import org.junit.jupiter.api.Test; -/** - * @author Simon Tzanakis - * @since 2020-09-11 - */ class MetisUserTest { @Test diff --git a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserViewTest.java b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserViewTest.java index 3175338557..c77d093979 100644 --- 
a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserViewTest.java +++ b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/MetisUserViewTest.java @@ -8,10 +8,6 @@ import java.util.Date; import org.junit.jupiter.api.Test; -/** - * @author Simon Tzanakis - * @since 2020-09-11 - */ class MetisUserViewTest { @Test diff --git a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/TestAccountRole.java b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/TestAccountRole.java index 51e55090f5..877516eeb0 100644 --- a/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/TestAccountRole.java +++ b/metis-authentication/metis-authentication-common/src/test/java/eu/europeana/metis/authentication/user/TestAccountRole.java @@ -6,10 +6,6 @@ import eu.europeana.metis.exception.BadContentException; import org.junit.jupiter.api.Test; -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-11-03 - */ class TestAccountRole { @Test diff --git a/metis-authentication/metis-authentication-rest-client/pom.xml b/metis-authentication/metis-authentication-rest-client/pom.xml index 923e5b4ca4..00591a2312 100644 --- a/metis-authentication/metis-authentication-rest-client/pom.xml +++ b/metis-authentication/metis-authentication-rest-client/pom.xml @@ -4,7 +4,7 @@ metis-authentication eu.europeana.metis - 12.2 + 13 metis-authentication-rest-client diff --git a/metis-authentication/metis-authentication-rest/pom.xml b/metis-authentication/metis-authentication-rest/pom.xml index 8ea4ac7506..c40c54c227 100644 --- a/metis-authentication/metis-authentication-rest/pom.xml +++ b/metis-authentication/metis-authentication-rest/pom.xml @@ -4,7 +4,7 @@ metis-authentication eu.europeana.metis - 12.2 + 13 metis-authentication-rest 
diff --git a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java index 6d57a6731e..98a201954d 100644 --- a/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java +++ b/metis-authentication/metis-authentication-rest/src/main/java/eu/europeana/metis/authentication/rest/controller/AuthenticationController.java @@ -1,6 +1,6 @@ package eu.europeana.metis.authentication.rest.controller; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; import eu.europeana.metis.authentication.service.AuthenticationService; import eu.europeana.metis.authentication.user.AccountRole; @@ -181,8 +181,7 @@ public void deleteUser(@RequestHeader("Authorization") String authorization, } authenticationService.deleteUser(emailParameter.getEmail()); if (LOGGER.isInfoEnabled()) { - LOGGER.info("User with email: {} deleted", - CRLF_PATTERN.matcher(emailParameter.getEmail()).replaceAll("")); + LOGGER.info("User with email: {} deleted", sanitizeCRLF(emailParameter.getEmail())); } } @@ -208,15 +207,13 @@ public void updateUserToMakeAdmin(@RequestHeader("Authorization") String authori if (emailParameter == null || StringUtils.isBlank(emailParameter.getEmail())) { throw new BadContentException("userEmailToMakeAdmin is empty"); } - String accessToken = authenticationService - .validateAuthorizationHeaderWithAccessToken(authorization); + final String accessToken = authenticationService.validateAuthorizationHeaderWithAccessToken(authorization); if (!authenticationService.isUserAdmin(accessToken)) { throw new UserUnauthorizedException(ACTION_NOT_ALLOWED_FOR_USER); } 
authenticationService.updateUserMakeAdmin(emailParameter.getEmail()); if (LOGGER.isInfoEnabled()) { - LOGGER.info("User with email: {} made admin", - CRLF_PATTERN.matcher(emailParameter.getEmail()).replaceAll("")); + LOGGER.info("User with email: {} made admin", sanitizeCRLF(emailParameter.getEmail())); } } diff --git a/metis-authentication/metis-authentication-service/pom.xml b/metis-authentication/metis-authentication-service/pom.xml index 4c5c9aa713..34c26e2d1b 100644 --- a/metis-authentication/metis-authentication-service/pom.xml +++ b/metis-authentication/metis-authentication-service/pom.xml @@ -4,7 +4,7 @@ metis-authentication eu.europeana.metis - 12.2 + 13 metis-authentication-service diff --git a/metis-authentication/pom.xml b/metis-authentication/pom.xml index ba73e3b5f6..4c54028ef6 100644 --- a/metis-authentication/pom.xml +++ b/metis-authentication/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-authentication pom diff --git a/metis-common/metis-common-base/pom.xml b/metis-common/metis-common-base/pom.xml index 7c639f5634..84a04cf272 100644 --- a/metis-common/metis-common-base/pom.xml +++ b/metis-common/metis-common-base/pom.xml @@ -4,9 +4,7 @@ eu.europeana.metis metis-common - 12.2 + 13 metis-common-base - - metis-common-base diff --git a/metis-common/metis-common-mongo/pom.xml b/metis-common/metis-common-mongo/pom.xml index 4bc4c6c609..4441b0cf1f 100644 --- a/metis-common/metis-common-mongo/pom.xml +++ b/metis-common/metis-common-mongo/pom.xml @@ -4,7 +4,7 @@ metis-common eu.europeana.metis - 12.2 + 13 metis-common-mongo diff --git a/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/dao/RecordDao.java b/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/dao/RecordDao.java index a399d85278..b06694285e 100644 --- a/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/dao/RecordDao.java +++ 
b/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/dao/RecordDao.java @@ -22,6 +22,7 @@ import eu.europeana.corelib.solr.entity.AgentImpl; import eu.europeana.corelib.solr.entity.AggregationImpl; import eu.europeana.corelib.solr.entity.BasicProxyImpl; +import eu.europeana.corelib.solr.entity.ChangeLogImpl; import eu.europeana.corelib.solr.entity.ConceptImpl; import eu.europeana.corelib.solr.entity.ConceptSchemeImpl; import eu.europeana.corelib.solr.entity.DatasetImpl; @@ -95,6 +96,7 @@ private Datastore createDatastore(MongoClient mongoClient, String databaseName) mapper.getEntityModel(TimespanImpl.class); mapper.getEntityModel(WebResourceImpl.class); mapper.getEntityModel(EuropeanaAggregationImpl.class); + mapper.getEntityModel(ChangeLogImpl.class); mapper.getEntityModel(EventImpl.class); mapper.getEntityModel(PhysicalThingImpl.class); mapper.getEntityModel(ConceptSchemeImpl.class); diff --git a/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/utils/MorphiaUtils.java b/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/utils/MorphiaUtils.java index 6d2a83c472..e8a73d23f8 100644 --- a/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/utils/MorphiaUtils.java +++ b/metis-common/metis-common-mongo/src/main/java/eu/europeana/metis/mongo/utils/MorphiaUtils.java @@ -97,7 +97,6 @@ public static List getListOfAggregationRetryable(Aggregation aggreg * @param the type of class that the {@link Query} represents * @return the morphia cursor */ - @SuppressWarnings("resource") private static BiFunction, FindOptions, MorphiaCursor> getMorphiaCursorFromQuery() { return (querySupplied, findOptionsSupplied) -> Optional.ofNullable(findOptionsSupplied) .map(querySupplied::iterator).orElseGet(querySupplied::iterator); @@ -114,7 +113,6 @@ private static BiFunction, FindOptions, MorphiaCursor> getMorphi * @param the type of class that the result of the {@link Aggregation} represents * @return the morphia 
cursor */ - @SuppressWarnings("resource") private static BiFunction, AggregationOptions, MorphiaCursor> getMorphiaCursorFromAggregation( Class resultObjectClass) { return (aggregationSupplied, aggregationOptionsSupplied) -> Optional diff --git a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoClientProviderTest.java b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoClientProviderTest.java index 145bba9fc0..04dedfc56d 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoClientProviderTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoClientProviderTest.java @@ -2,8 +2,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; import com.mongodb.MongoClientSettings; import com.mongodb.ReadPreference; @@ -24,7 +24,7 @@ */ class MongoClientProviderTest { - private final static String DATABASE_NAME = "dbTest"; + private static final String DATABASE_NAME = "dbTest"; private static EmbeddedLocalhostMongo embeddedLocalhostMongo; @@ -39,7 +39,7 @@ static void tearDown() { embeddedLocalhostMongo.stop(); } - private static MongoProperties getMongoProperties() { + private static MongoProperties getMongoProperties() { final String mongoHost = embeddedLocalhostMongo.getMongoHost(); final int mongoPort = embeddedLocalhostMongo.getMongoPort(); final MongoProperties mongoProperties = new MongoProperties<>( @@ -79,7 +79,7 @@ void createAsSupplier() { final Supplier mongoClientSupplier = MongoClientProvider.createAsSupplier(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - assertTrue(mongoClientSupplier.get() instanceof MongoClient); + 
assertInstanceOf(MongoClient.class, mongoClientSupplier.get()); } @Test @@ -95,7 +95,7 @@ void createMongoClient() { final MongoClient mongoClient = new MongoClientProvider(getMongoProperties()).createMongoClient(); assertNotNull(mongoClient); - assertTrue(mongoClient instanceof MongoClient); + assertInstanceOf(MongoClient.class, mongoClient); } @Test diff --git a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoPropertiesTest.java b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoPropertiesTest.java index 321694e9af..77d2e5f952 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoPropertiesTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/connection/MongoPropertiesTest.java @@ -94,7 +94,7 @@ void addMongoHost() throws Exception { } private static void assertMongoPropertiesOnlyWithAuthentication(MongoProperties mongoProperties) throws Exception { - assertEquals("localhost:8521", mongoProperties.getMongoHosts().get(0).toString()); + assertEquals("localhost:8521", mongoProperties.getMongoHosts().getFirst().toString()); assertNotNull(mongoProperties.getMongoCredentials()); assertEquals("authenticationdb", mongoProperties.getMongoCredentials().getSource()); assertEquals("userName", mongoProperties.getMongoCredentials().getUserName()); @@ -102,7 +102,7 @@ private static void assertMongoPropertiesOnlyWithAuthentication(MongoProperties } private static void assertMongoPropertiesWithoutAuthentication(MongoProperties mongoProperties) throws Exception { - assertEquals("localhost:8521", mongoProperties.getMongoHosts().get(0).toString()); + assertEquals("localhost:8521", mongoProperties.getMongoHosts().getFirst().toString()); assertNull(mongoProperties.getMongoCredentials()); } diff --git a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordDaoTest.java 
b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordDaoTest.java index fe5f6497cc..ee6313c363 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordDaoTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordDaoTest.java @@ -57,7 +57,7 @@ */ class RecordDaoTest { - private final static String DATABASE_NAME = "dbTest"; + private static final String DATABASE_NAME = "dbTest"; private static RecordDao recordDao; diff --git a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordRedirectDaoTest.java b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordRedirectDaoTest.java index 64e547345d..bbdb97e8de 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordRedirectDaoTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/dao/RecordRedirectDaoTest.java @@ -7,7 +7,6 @@ import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; import dev.morphia.Datastore; -import eu.europeana.metis.mongo.dao.RecordRedirectDao; import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; import eu.europeana.metis.mongo.model.RecordRedirect; import java.time.Instant; @@ -26,7 +25,7 @@ */ class RecordRedirectDaoTest { - private final static String DATABASE_NAME = "dbTest"; + private static final String DATABASE_NAME = "dbTest"; private static RecordRedirectDao recordRedirectDao; @@ -91,7 +90,7 @@ void getRecordRedirectsByNewId() { List recordRedirectList = recordRedirectDao.getRecordRedirectsByNewId("61eec080f582833f364dad08"); assertEquals(1, recordRedirectList.size()); - assertEquals(recordRedirect.getNewId(), recordRedirectList.get(0).getNewId()); + assertEquals(recordRedirect.getNewId(), recordRedirectList.getFirst().getNewId()); } @Test diff --git 
a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/MorphiaUtilsTest.java b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/MorphiaUtilsTest.java index 4f20914577..112fd8e66f 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/MorphiaUtilsTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/MorphiaUtilsTest.java @@ -18,15 +18,11 @@ import dev.morphia.query.Query; import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; import eu.europeana.metis.mongo.model.HasMongoObjectId; -import eu.europeana.metis.mongo.utils.MorphiaUtils; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; import java.util.List; import java.util.Objects; import org.bson.types.ObjectId; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; /** @@ -37,7 +33,7 @@ */ class MorphiaUtilsTest { - private final static String DATABASE_NAME = "dbTest"; + private static final String DATABASE_NAME = "dbTest"; private static Datastore datastore; @@ -92,7 +88,7 @@ void getListOfAggregationRetryable() { final List dummyEntityList = MorphiaUtils.getListOfAggregationRetryable(aggregation, DummyEntity.class); assertEquals(1, dummyEntityList.size()); - assertEquals(5, Integer.valueOf(dummyEntityList.get(0).name)); + assertEquals(5, Integer.valueOf(dummyEntityList.getFirst().name)); } @Test @@ -107,7 +103,7 @@ void testGetListOfAggregationRetryable() { aggregationOptions); assertEquals(1, dummyEntityList.size()); - assertEquals(5, Integer.valueOf(dummyEntityList.get(0).name)); + assertEquals(5, Integer.valueOf(dummyEntityList.getFirst().name)); } private static void addEntitiesToDatastore() { @@ -180,12 +176,10 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof DummyEntity)) { + if (!(o 
instanceof DummyEntity that)) { return false; } - final DummyEntity that = (DummyEntity) o; - if (!Objects.equals(id, that.id)) { return false; } diff --git a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/ObjectIdSerializerTest.java b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/ObjectIdSerializerTest.java index fa20b4881a..849af3bbdf 100644 --- a/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/ObjectIdSerializerTest.java +++ b/metis-common/metis-common-mongo/src/test/java/eu/europeana/metis/mongo/utils/ObjectIdSerializerTest.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; @@ -55,8 +54,7 @@ void serializeNull_expectSuccess() throws IOException { } private static JsonGenerator getJsonGenerator(Writer writer) throws IOException { - final JsonGenerator jsonGenerator = JsonFactory.builder().build().createGenerator(writer); - return jsonGenerator; + return JsonFactory.builder().build().createGenerator(writer); } private static SerializerProvider getSerializerProvider() { diff --git a/metis-common/metis-common-network/pom.xml b/metis-common/metis-common-network/pom.xml index b5412aa494..27e9526500 100644 --- a/metis-common/metis-common-network/pom.xml +++ b/metis-common/metis-common-network/pom.xml @@ -4,7 +4,7 @@ metis-common eu.europeana.metis - 12.2 + 13 metis-common-network diff --git a/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/AbstractHttpClient.java b/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/AbstractHttpClient.java index b05861a133..aa7c9c0cba 100644 --- 
a/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/AbstractHttpClient.java +++ b/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/AbstractHttpClient.java @@ -5,6 +5,7 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; +import java.lang.invoke.MethodHandles; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; @@ -56,7 +57,7 @@ */ public abstract class AbstractHttpClient implements Closeable { - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractHttpClient.class); + private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final int HTTP_SUCCESS_MIN_INCLUSIVE = HttpStatus.SC_OK; private static final int HTTP_SUCCESS_MAX_EXCLUSIVE = HttpStatus.SC_MULTIPLE_CHOICES; @@ -194,16 +195,7 @@ public void run() { final RedirectLocations redirectUris = context.getRedirectLocations(); final URI actualUri = (redirectUris == null || redirectUris.size() == 0) ? httpGet.getUri() : redirectUris.get(redirectUris.size() - 1); - final ContentDisposition contentDisposition = Optional.ofNullable(responseObject).map( re -> { - try { - return re.getHeader("Content-Disposition") != null ? - ContentDisposition.parse(re.getHeader("Content-Disposition").getValue()) : null; - } catch (ProtocolException ex) { - LOGGER.debug("No content-disposition header, nothing to do", ex); - return null; - } - } - ).orElse(null); + final ContentDisposition contentDisposition = getContentDisposition(responseObject); // Obtain the result (check for timeout just in case). final ContentRetriever contentRetriever = ContentRetriever.forNonCloseableContent( responseEntity == null ? 
InputStream::nullInputStream : responseEntity::getContent, @@ -247,6 +239,20 @@ public void run() { } } + private static ContentDisposition getContentDisposition(CloseableHttpResponse responseObject) { + return Optional.ofNullable(responseObject).map(re -> { + try { + return Optional.ofNullable(re.getHeader("Content-Disposition")) + .map(header -> ContentDisposition.parse(header.getValue())) + .orElse(null); + } catch (ProtocolException ex) { + LOGGER.debug("No content-disposition header, nothing to do", ex); + return null; + } + } + ).orElse(null); + } + private static boolean httpCallIsSuccessful(int status) { return status >= HTTP_SUCCESS_MIN_INCLUSIVE && status < HTTP_SUCCESS_MAX_EXCLUSIVE; } diff --git a/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/ExternalRequestUtil.java b/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/ExternalRequestUtil.java index 820d17db81..f415e4d5aa 100644 --- a/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/ExternalRequestUtil.java +++ b/metis-common/metis-common-network/src/main/java/eu/europeana/metis/network/ExternalRequestUtil.java @@ -22,8 +22,6 @@ * A utilities class used to encapsulate methods that throw exceptions {@link RuntimeException} or * {@link Exception} and should follow retries logic based on specific caused exceptions. 
* - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-07-24 */ public final class ExternalRequestUtil { diff --git a/metis-common/metis-common-solr/pom.xml b/metis-common/metis-common-solr/pom.xml index 6aa4508267..8342e7bd33 100644 --- a/metis-common/metis-common-solr/pom.xml +++ b/metis-common/metis-common-solr/pom.xml @@ -4,7 +4,7 @@ metis-common eu.europeana.metis - 12.2 + 13 metis-common-solr diff --git a/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/client/CompoundSolrClientTest.java b/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/client/CompoundSolrClientTest.java index b86bdd1415..48771468ff 100644 --- a/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/client/CompoundSolrClientTest.java +++ b/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/client/CompoundSolrClientTest.java @@ -2,7 +2,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -10,7 +9,6 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.LBHttpSolrClient; import org.junit.jupiter.api.Test; -import org.mockito.Mockito; /** * Unit test for {@link CompoundSolrClient} diff --git a/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/connection/SolrPropertiesTest.java b/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/connection/SolrPropertiesTest.java index 3638541e20..fe9ce8fa58 100644 --- a/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/connection/SolrPropertiesTest.java +++ b/metis-common/metis-common-solr/src/test/java/eu/europeana/metis/solr/connection/SolrPropertiesTest.java @@ -134,7 +134,7 @@ void addSolrHost() throws Exception { List actualSolrHosts = 
solrProperties.getSolrHosts(); assertEquals(1, actualSolrHosts.size()); - assertEquals(new URI("http://localhost:8983/solr"), actualSolrHosts.get(0)); + assertEquals(new URI("http://localhost:8983/solr"), actualSolrHosts.getFirst()); } @Test diff --git a/metis-common/metis-common-spring-properties/pom.xml b/metis-common/metis-common-spring-properties/pom.xml index 9067ca1671..cb70bf0795 100644 --- a/metis-common/metis-common-spring-properties/pom.xml +++ b/metis-common/metis-common-spring-properties/pom.xml @@ -4,7 +4,7 @@ eu.europeana.metis metis-common - 12.2 + 13 metis-common-spring-properties diff --git a/metis-common/metis-common-utils/pom.xml b/metis-common/metis-common-utils/pom.xml index 1234366f54..f69717d55e 100644 --- a/metis-common/metis-common-utils/pom.xml +++ b/metis-common/metis-common-utils/pom.xml @@ -4,7 +4,7 @@ metis-common eu.europeana.metis - 12.2 + 13 metis-common-utils @@ -20,6 +20,7 @@ org.springframework.boot spring-boot-configuration-processor + runtime true diff --git a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/CommonStringValues.java b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/CommonStringValues.java index acc135504c..d618079cec 100644 --- a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/CommonStringValues.java +++ b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/CommonStringValues.java @@ -24,8 +24,19 @@ public final class CommonStringValues { public static final String REPLACEABLE_CRLF_CHARACTERS_REGEX = "[\r\n\t]"; - public static final Pattern CRLF_PATTERN = Pattern.compile(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX); + public static final Pattern CRLF_PATTERN = Pattern.compile(REPLACEABLE_CRLF_CHARACTERS_REGEX); private CommonStringValues() { } + + /** + * Sanitized input value from Logging injection attacks(javasecurity:S5145). + *

Replaces CR and LF characters with a safe value e.g. ""(empty string).

+ * + * @param input the input + * @return the sanitized input, safe for logging + */ + public static String sanitizeCRLF(String input) { + return input == null ? null : CRLF_PATTERN.matcher(input).replaceAll(""); + } } diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Country.java b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/Country.java similarity index 85% rename from metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Country.java rename to metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/Country.java index ac842da5a9..51520ed921 100644 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Country.java +++ b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/Country.java @@ -1,4 +1,4 @@ -package eu.europeana.metis.core.common; +package eu.europeana.metis.utils; import java.util.Arrays; import java.util.Comparator; @@ -120,6 +120,28 @@ public static Country getCountryFromIsoCode(String isoCode) { return null; } + /** + * Method that returns the enum with the corresponding given string value + * @param countryName The string value to match the enum values with + * @return The enum country values that matches the given string value + */ + public static Country fromCountryNameToIsoCode(String countryName){ + Country result = null; + for(Country country : values()){ + if(country.getName().equals(countryName)){ + result = country; + break; + } + } + + if(result == null){ + throw new IllegalArgumentException("Country name "+countryName+" not found"); + } + + return result; + + } + /** * Provides the countries sorted by the {@link #getName()} field * diff --git a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/DepublicationReason.java b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/DepublicationReason.java new file mode 100644 index 0000000000..6db4babeee --- /dev/null 
+++ b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/DepublicationReason.java @@ -0,0 +1,42 @@ +package eu.europeana.metis.utils; + +/** + * Enum for depublication reason. + *

Note: The enum value {@link #LEGACY} is to be used for historical depublication workflows(before the reason was + * implemented). In other words the historical workflows will be populated by a script once with the {@link #LEGACY} reason, and + * this value should never be used during depublication since its release. At the time of writing the url String is not meant to + * be used for populating records in the database(e.g. tombstoning)

+ */ +public enum DepublicationReason { + + BROKEN_MEDIA_LINKS("Broken media links", "contentTier0"), + GDPR("GDPR", "gdpr"), + PERMISSION_ISSUES("Permission issues", "noPermission"), + SENSITIVE_CONTENT("Sensitive content", "sensitiveContent"), + REMOVED_DATA_AT_SOURCE("Removed data at source", "sourceRemoval"), + GENERIC("Generic", "generic"), + LEGACY("Legacy", "legacy"); + + private static final String BASE_URL = "http://data.europeana.eu/vocabulary/depublicationReason/"; + + private final String title; + private final String url; + + DepublicationReason(String title, String urlSuffix) { + this.title = title; + this.url = BASE_URL + urlSuffix; + } + + @Override + public String toString() { + return title; + } + + public String getTitle() { + return title; + } + + public String getUrl() { + return url; + } +} diff --git a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RdfNamespaceContext.java b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RdfNamespaceContext.java index 7ccb90aac4..a63e89673a 100644 --- a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RdfNamespaceContext.java +++ b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RdfNamespaceContext.java @@ -19,6 +19,8 @@ public class RdfNamespaceContext implements NamespaceContext { public static final String RDF_NAMESPACE_PREFIX = "rdf"; public static final String EDM_NAMESPACE_PREFIX = "edm"; public static final String ORE_NAMESPACE_PREFIX = "ore"; + public static final String SVCS_NAMESPACE_PREFIX = "svcs"; + public static final String DCTERMS_NAMESPACE_PREFIX = "dcterms"; private static final Map PREFIX_TO_NAMESPACE_MAP = new HashMap<>(); @@ -30,6 +32,8 @@ public class RdfNamespaceContext implements NamespaceContext { PREFIX_TO_NAMESPACE_MAP.put(RDF_NAMESPACE_PREFIX, "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); PREFIX_TO_NAMESPACE_MAP.put(ORE_NAMESPACE_PREFIX, "http://www.openarchives.org/ore/terms/"); 
PREFIX_TO_NAMESPACE_MAP.put(EDM_NAMESPACE_PREFIX, "http://www.europeana.eu/schemas/edm/"); + PREFIX_TO_NAMESPACE_MAP.put(SVCS_NAMESPACE_PREFIX,"http://rdfs.org/sioc/services#"); + PREFIX_TO_NAMESPACE_MAP.put(DCTERMS_NAMESPACE_PREFIX, "http://purl.org/dc/terms/"); } @Override diff --git a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RestEndpoints.java b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RestEndpoints.java index 94ca888dd4..44e6446d6b 100644 --- a/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RestEndpoints.java +++ b/metis-common/metis-common-utils/src/main/java/eu/europeana/metis/utils/RestEndpoints.java @@ -30,6 +30,7 @@ public final class RestEndpoints { // DEPUBLISHED RECORDS public static final String DEPUBLISH_RECORDIDS_DATASETID = "/depublish/record_ids/{datasetId}"; public static final String DEPUBLISH_EXECUTE_DATASETID = "/depublish/execute/{datasetId}"; + public static final String DEPUBLISH_REASONS = "/depublish/reasons"; //AUTHENTICATION public static final String AUTHENTICATION_REGISTER = "/authentication/register"; @@ -87,6 +88,9 @@ public final class RestEndpoints { public static final String REPOSITORY_HTTP_ENDPOINT_ZIP = "/repository/zip/{dataset}.zip"; public static final String REPOSITORY_OAI_ENDPOINT = "/repository/oai"; + /* METIS-DEBIAS ENDPOINTS */ + public static final String DEBIAS_DETECTION = "/debias"; + private RestEndpoints() { } diff --git a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CommonStringValuesTest.java b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CommonStringValuesTest.java new file mode 100644 index 0000000000..5c04334144 --- /dev/null +++ b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CommonStringValuesTest.java @@ -0,0 +1,88 @@ +package eu.europeana.metis.utils; + +import static eu.europeana.metis.utils.CommonStringValues.BATCH_OF_DATASETS_RETURNED; +import 
static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.DATE_FORMAT; +import static eu.europeana.metis.utils.CommonStringValues.DATE_FORMAT_FOR_REQUEST_PARAM; +import static eu.europeana.metis.utils.CommonStringValues.DATE_FORMAT_FOR_SCHEDULING; +import static eu.europeana.metis.utils.CommonStringValues.DATE_FORMAT_Z; +import static eu.europeana.metis.utils.CommonStringValues.EUROPEANA_ID_CREATOR_INITIALIZATION_FAILED; +import static eu.europeana.metis.utils.CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE; +import static eu.europeana.metis.utils.CommonStringValues.PAGE_COUNT_CANNOT_BE_ZERO_OR_NEGATIVE; +import static eu.europeana.metis.utils.CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED; +import static eu.europeana.metis.utils.CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX; +import static eu.europeana.metis.utils.CommonStringValues.S_DATA_PROVIDERS_S_DATA_SETS_S_TEMPLATE; +import static eu.europeana.metis.utils.CommonStringValues.UNAUTHORIZED; +import static eu.europeana.metis.utils.CommonStringValues.WRONG_ACCESS_TOKEN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.regex.Pattern; +import org.junit.jupiter.api.Test; + +class CommonStringValuesTest { + + @Test + void testFieldsAreUsed() { + assertNotNull(WRONG_ACCESS_TOKEN); + assertNotNull(BATCH_OF_DATASETS_RETURNED); + assertNotNull(NEXT_PAGE_CANNOT_BE_NEGATIVE); + assertNotNull(PAGE_COUNT_CANNOT_BE_ZERO_OR_NEGATIVE); + assertNotNull(PLUGIN_EXECUTION_NOT_ALLOWED); + assertNotNull(UNAUTHORIZED); + assertNotNull(EUROPEANA_ID_CREATOR_INITIALIZATION_FAILED); + assertNotNull(DATE_FORMAT); + assertNotNull(DATE_FORMAT_Z); + assertNotNull(DATE_FORMAT_FOR_SCHEDULING); + assertNotNull(DATE_FORMAT_FOR_REQUEST_PARAM); + 
assertNotNull(S_DATA_PROVIDERS_S_DATA_SETS_S_TEMPLATE); + assertNotNull(REPLACEABLE_CRLF_CHARACTERS_REGEX); + assertNotNull(CRLF_PATTERN); + } + + @Test + void testPattern() { + Pattern expectedPattern = Pattern.compile("[\r\n\t]"); + assertEquals(expectedPattern.pattern(), CRLF_PATTERN.pattern()); + } + + @Test + void testSanitizeCRLF_NullInput() { + assertNull(sanitizeCRLF(null)); + } + + @Test + void testSanitizeStringForLogging_EmptyString() { + String input = ""; + assertEquals("", sanitizeCRLF(input)); + } + + @Test + void testSanitizeCRLF_NoSpecialCharacters() { + String input = "This is a test."; + assertEquals("This is a test.", sanitizeCRLF(input)); + } + + @Test + void testSanitizeCRLF_WithCRLFCharacters() { + String input = "This is a test.\nThis is a new line.\rThis is a carriage return.\tThis is a tab."; + String expected = "This is a test.This is a new line.This is a carriage return.This is a tab."; + assertEquals(expected, sanitizeCRLF(input)); + } + + @Test + void testSanitizeCRLF_MixedInput() { + String input = "\r\n\tThis string has special characters at the start.\r\n"; + String expected = "This string has special characters at the start."; + assertEquals(expected, sanitizeCRLF(input)); + } + + @Test + void testSanitizeCRLF_NoCRLFCharacters() { + String input = "Regular string without CRLF."; + assertEquals("Regular string without CRLF.", sanitizeCRLF(input)); + } +} + diff --git a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CompressedFileHandlerGzTest.java b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CompressedFileHandlerGzTest.java index ac5a92f7a6..eb1abc89f4 100644 --- a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CompressedFileHandlerGzTest.java +++ b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/CompressedFileHandlerGzTest.java @@ -19,14 +19,14 @@ public class CompressedFileHandlerGzTest { public static final String FILE_EXTENSION = 
".tar.gz"; - private final static String DESTINATION_DIR = String.format("src%1$stest%1$sresources%1$s__files%1$s", File.separator); - private final static int XML_FILES_COUNT = 13; - private final static String FILE_NAME = "gzFile"; - private final static String FILE_NAME2 = "gzFileWithCompressedGZFiles"; - private final static String FILE_NAME3 = "gzFilesWithMixedCompressedFiles"; - private final static String FILE_NAME4 = "gzFileWithSubdirContainingSpaceInName"; - private final static String XML_TYPE = "xml"; - private final static String DESTINATION_NAME_FOR_ZIP_WITH_SPACES = "zip_file"; + private static final String DESTINATION_DIR = String.format("src%1$stest%1$sresources%1$s__files%1$s", File.separator); + private static final int XML_FILES_COUNT = 13; + private static final String FILE_NAME = "gzFile"; + private static final String FILE_NAME2 = "gzFileWithCompressedGZFiles"; + private static final String FILE_NAME3 = "gzFilesWithMixedCompressedFiles"; + private static final String FILE_NAME4 = "gzFileWithSubdirContainingSpaceInName"; + private static final String XML_TYPE = "xml"; + private static final String DESTINATION_NAME_FOR_ZIP_WITH_SPACES = "zip_file"; @AfterAll public static void cleanUp() throws IOException { @@ -56,14 +56,6 @@ void shouldUnpackTheTarGzFilesRecursivelyWithCompressedXMLFiles() throws IOExcep assertEquals(XML_FILES_COUNT, files.size()); } - @Test - void shouldUnpackTheTGZFilesRecursivelyWithCompressedXMLFiles() throws IOException { - CompressedFileHandler.extractFile(Path.of(DESTINATION_DIR + FILE_NAME2 + FILE_EXTENSION), Path.of(DESTINATION_DIR)); - Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME2); - assertNotNull(files); - assertEquals(XML_FILES_COUNT, files.size()); - } - @Test void shouldUnpackTheTarGzFilesRecursivelyWithMixedNestedCompressedFiles() throws IOException { CompressedFileHandler.extractFile(Path.of(DESTINATION_DIR + FILE_NAME3 + FILE_EXTENSION), Path.of(DESTINATION_DIR)); diff --git 
a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/DepublicationReasonTest.java b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/DepublicationReasonTest.java new file mode 100644 index 0000000000..deecf5beec --- /dev/null +++ b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/DepublicationReasonTest.java @@ -0,0 +1,54 @@ +package eu.europeana.metis.utils; + +import static eu.europeana.metis.utils.DepublicationReason.BROKEN_MEDIA_LINKS; +import static eu.europeana.metis.utils.DepublicationReason.GDPR; +import static eu.europeana.metis.utils.DepublicationReason.GENERIC; +import static eu.europeana.metis.utils.DepublicationReason.PERMISSION_ISSUES; +import static eu.europeana.metis.utils.DepublicationReason.REMOVED_DATA_AT_SOURCE; +import static eu.europeana.metis.utils.DepublicationReason.SENSITIVE_CONTENT; +import static eu.europeana.metis.utils.DepublicationReason.LEGACY; +import static eu.europeana.metis.utils.DepublicationReason.values; +import static java.util.Arrays.asList; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; + +class DepublicationReasonTest { + + @Test + void testValues() { + Arrays.stream(values()).forEach(depublicationReason -> { + assertNotNull(depublicationReason.getTitle()); + assertNotNull(depublicationReason.getUrl()); + }); + } + + @Test + void testToStringMethod() { + assertEquals("Broken media links", BROKEN_MEDIA_LINKS.toString()); + assertEquals("GDPR", GDPR.toString()); + assertEquals("Permission issues", PERMISSION_ISSUES.toString()); + assertEquals("Sensitive content", SENSITIVE_CONTENT.toString()); + assertEquals("Removed data at source", REMOVED_DATA_AT_SOURCE.toString()); + assertEquals("Generic", GENERIC.toString()); + assertEquals("Legacy", 
LEGACY.toString()); + } + + @Test + void testEnumValuePresence() { + List depublicationReasons = asList(values()); + assertEquals(7, depublicationReasons.size()); + + assertTrue(depublicationReasons.contains(BROKEN_MEDIA_LINKS)); + assertTrue(depublicationReasons.contains(GDPR)); + assertTrue(depublicationReasons.contains(PERMISSION_ISSUES)); + assertTrue(depublicationReasons.contains(SENSITIVE_CONTENT)); + assertTrue(depublicationReasons.contains(REMOVED_DATA_AT_SOURCE)); + assertTrue(depublicationReasons.contains(GENERIC)); + assertTrue(depublicationReasons.contains(LEGACY)); + } +} \ No newline at end of file diff --git a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/SonarqubeNullcheckAvoidanceUtilsTest.java b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/SonarqubeNullcheckAvoidanceUtilsTest.java index b143dadfd5..e1ecf3065d 100644 --- a/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/SonarqubeNullcheckAvoidanceUtilsTest.java +++ b/metis-common/metis-common-utils/src/test/java/eu/europeana/metis/utils/SonarqubeNullcheckAvoidanceUtilsTest.java @@ -68,7 +68,7 @@ void testPerformThrowingFunctionReturn() { Integer actualValue = performThrowingFunction(number, x -> { integerList.add(x * 2); - return integerList.get(0); + return integerList.getFirst(); } ); assertEquals(24, actualValue); diff --git a/metis-common/pom.xml b/metis-common/pom.xml index dcc5f8a08f..7c02014cca 100644 --- a/metis-common/pom.xml +++ b/metis-common/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-common pom diff --git a/metis-core/metis-core-common/pom.xml b/metis-core/metis-core-common/pom.xml deleted file mode 100644 index bdc8aa18a7..0000000000 --- a/metis-core/metis-core-common/pom.xml +++ /dev/null @@ -1,99 +0,0 @@ - - - 4.0.0 - - metis-core - eu.europeana.metis - 12.2 - - metis-core-common - - - org.junit.jupiter - junit-jupiter-api - - - org.junit.jupiter - junit-jupiter-engine - - - 
org.junit.jupiter - junit-jupiter-params - ${version.junit} - test - - - eu.europeana.metis - metis-common-utils - ${project.version} - - - eu.europeana.metis - metis-common-mongo - ${project.version} - - - eu.europeana.corelib - corelib-web - - - - - eu.europeana.cloud - ecloud-service-dps-rest-client-java - ${version.ecloud} - - - org.slf4j - slf4j-log4j12 - - - - - dev.morphia.morphia - morphia-core - ${version.morphia.core} - - - com.fasterxml.jackson.core - jackson-annotations - ${version.jackson} - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - ${version.jackson} - - - com.fasterxml.jackson.datatype - jackson-datatype-jsr310 - ${version.jackson} - - - com.fasterxml.jackson.core - jackson-databind - ${version.jackson} - - - org.springframework - spring-web - - - org.apache.commons - commons-lang3 - - - - - - - org.springframework - spring-framework-bom - ${version.spring} - pom - import - - - - diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountryDeserializer.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountryDeserializer.java deleted file mode 100644 index 3bb78ab6aa..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountryDeserializer.java +++ /dev/null @@ -1,41 +0,0 @@ -package eu.europeana.metis.core.common; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import java.io.IOException; - -/** - * Deserializer for {@link Country} enum. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-09 - */ -public class CountryDeserializer extends StdDeserializer { - - private static final long serialVersionUID = 1L; - - /** - * Constructor for null value - */ - public CountryDeserializer() { - this(null); - } - - /** - * Required as part of {@link StdDeserializer} - * - * @param vc required parameter - */ - public CountryDeserializer(Class vc) { - super(vc); - } - - @Override - public Country deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) - throws IOException { - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - return Country.getCountryFromEnumName(node.get("enum").asText()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountrySerializer.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountrySerializer.java deleted file mode 100644 index a310d16be6..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/CountrySerializer.java +++ /dev/null @@ -1,39 +0,0 @@ -package eu.europeana.metis.core.common; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; -import java.io.IOException; - -/** - * Serializer for {@link Country} enum. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-09 - */ -public class CountrySerializer extends StdSerializer { - - private static final long serialVersionUID = 1L; - - /** - * Constructor for the {@link Country} json serializer. 
- */ - public CountrySerializer() { - super(Country.class); - } - - @Override - public void serialize(Country country, - JsonGenerator generator, - SerializerProvider provider) - throws IOException { - generator.writeStartObject(); - generator.writeFieldName("enum"); - generator.writeString(country.name()); - generator.writeFieldName("name"); - generator.writeString(country.getName()); - generator.writeFieldName("isoCode"); - generator.writeString(country.getIsoCode()); - generator.writeEndObject(); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/DaoFieldNames.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/DaoFieldNames.java deleted file mode 100644 index f4e5c6efa1..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/DaoFieldNames.java +++ /dev/null @@ -1,41 +0,0 @@ -package eu.europeana.metis.core.common; - -/** - * Enumeration that contains field names for dao queries. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-07-04 - */ -public enum DaoFieldNames { - ID("_id"), - DATASET_ID("datasetId"), - DATASET_NAME("datasetName"), - PROVIDER("provider"), - DATA_PROVIDER("dataProvider"), - WORKFLOW_NAME("workflowName"), - WORKFLOW_STATUS("workflowStatus"), - PLUGIN_STATUS("pluginStatus"), - PLUGIN_TYPE("pluginType"), - METIS_PLUGINS("metisPlugins"), - CREATED_DATE("createdDate"), - STARTED_DATE("startedDate"), - UPDATED_DATE("updatedDate"), - FINISHED_DATE("finishedDate"), - PLUGIN_METADATA("pluginMetadata"), - XSLT_ID("xsltId"); - - private final String fieldName; - - DaoFieldNames(String fieldName) { - this.fieldName = fieldName; - } - - public String getFieldName() { - return fieldName; - } - - @Override - public String toString() { - return fieldName; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/JavaTimeSerialization.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/JavaTimeSerialization.java deleted file mode 100644 index 6062318fb3..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/JavaTimeSerialization.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.common; - -import com.fasterxml.jackson.datatype.jsr310.ser.InstantSerializer; -import java.time.format.DateTimeFormatter; - -/** - * This class provides serialization and deserialization for the java 8 date and time api. - */ -public final class JavaTimeSerialization { - - private JavaTimeSerialization() { - } - - /** - * Serializer for {@link java.time.Instant} objects according to {@link - * DateTimeFormatter#ISO_INSTANT}. 
- */ - public static class IsoInstantSerializer extends InstantSerializer { - - private static final long serialVersionUID = -4172609679650500288L; - - public IsoInstantSerializer() { - super(InstantSerializer.INSTANCE, Boolean.FALSE, DateTimeFormatter.ISO_INSTANT); - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Language.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Language.java deleted file mode 100644 index 6506c05e02..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/Language.java +++ /dev/null @@ -1,60 +0,0 @@ -package eu.europeana.metis.core.common; - -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; - -/** - * The name of the dataset (enumerated) - * Created by ymamakis on 2/17/16. - */ -public enum Language { - - AR("Arabic"), AZ("Azerbaijani"), BE("Belarusian"), BG("Bulgarian"), BS("Bosnian"), CA( - "Catalan"), CNR("Montenegrin"), CS("Czech"), CY("Welsh"), DA("Danish"), DE("German"), EL( - "Greek"), EN("English"), ES("Spanish"), ET("Estonian"), EU("Basque"), FI("Finnish"), FR( - "French"), GA("Irish"), GD("Gaelic (Scottish)"), GL("Galician"), HE("Hebrew"), HI( - "Hindi"), HR("Croatian (hrvatski jezik)"), HU("Hungarian"), HY("Armenian"), IE( - "Interlingue"), IS("Icelandic"), IT("Italian"), JA("Japanese"), KA("Georgian"), KO( - "Korean"), LT("Lithuanian"), LV("Latvian (Lettish)"), MK("Macedonian"), MT("Maltese"), MUL( - "Multilingual Content"), NL("Dutch"), NO("Norwegian"), PL("Polish"), PT("Portugese"), RO( - "Romanian"), RU("Russian"), SK("Slovak"), SL("Slovenian"), SQ("Albanian"), SR("Serbian"), SV( - "Swedish"), TR("Turkish"), UK("Ukrainian"), YI("Yiddish"), ZH("Chinese"); - - private String name; - - Language(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - /** - * Lookup of a {@link Language} enum from a provided enum String representation of the enum 
value. - *

e.g. if provided enumName is EL then the returned Language will be Language.EL

- * - * @param enumName the String representation of an enum value - * @return the {@link Language} that represents the provided value or null if not found - */ - public static Language getLanguageFromEnumName(String enumName) { - for (Language language : Language.values()) { - if (language.name().equalsIgnoreCase(enumName)) { - return language; - } - } - return null; - } - - /** - * Provides the languages sorted by the {@link #getName()} field - * - * @return the list of languages sorted - */ - public static List getLanguageListSortedByName() { - List languages = Arrays.asList(Language.values()); - languages.sort(Comparator.comparing(Language::getName)); - return languages; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageDeserializer.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageDeserializer.java deleted file mode 100644 index 93a9eac18c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageDeserializer.java +++ /dev/null @@ -1,41 +0,0 @@ -package eu.europeana.metis.core.common; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import java.io.IOException; - -/** - * Deserializer for {@link Language} enum. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-09 - */ -public class LanguageDeserializer extends StdDeserializer { - - private static final long serialVersionUID = 1L; - - /** - * Constructor for null value - */ - public LanguageDeserializer() { - this(null); - } - - /** - * Required as part of {@link StdDeserializer} - * @param vc required parameter - */ - public LanguageDeserializer(Class vc) { - super(vc); - } - - @Override - public Language deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) - throws IOException { - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - return Language.getLanguageFromEnumName(node.get("enum").asText()); - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageSerializer.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageSerializer.java deleted file mode 100644 index a27c02a44e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/LanguageSerializer.java +++ /dev/null @@ -1,38 +0,0 @@ -package eu.europeana.metis.core.common; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; -import java.io.IOException; - -/** - * Serializer for {@link Language} enum. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-09 - */ -public class LanguageSerializer extends StdSerializer { - - private static final long serialVersionUID = 1L; - - /** - * Constructor for the {@link Language} json serializer. 
- */ - public LanguageSerializer() { - super(Language.class); - } - - @Override - public void serialize(Language language, - JsonGenerator generator, - SerializerProvider provider) - throws IOException { - generator.writeStartObject(); - generator.writeFieldName("enum"); - generator.writeString(language.name()); - generator.writeFieldName("name"); - generator.writeString(language.getName()); - generator.writeEndObject(); - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/RecordIdUtils.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/RecordIdUtils.java deleted file mode 100644 index 4b64d707ba..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/RecordIdUtils.java +++ /dev/null @@ -1,148 +0,0 @@ -package eu.europeana.metis.core.common; - -import eu.europeana.metis.exception.BadContentException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Collection; -import java.util.HashSet; -import java.util.Optional; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; - -/** - * This class contains functionality concerning the parsing and composing of depublish record IDs. - */ -public final class RecordIdUtils { - - private static final Pattern LINE_SEPARATION_PATTERN = Pattern.compile("\\R"); - private static final Pattern INVALID_CHAR_IN_RECORD_ID = Pattern.compile("\\W"); - private static final Pattern FULL_RECORD_ID_PATTERN = Pattern.compile("^/([^/\\s]+)/([^/\\s]+)$"); - - private RecordIdUtils() { - } - - /** - * Composes full record IDs. - * @param datasetId The ID of the dataset. - * @param recordIds The (simple) IDs of the records. - * @return The full/qualified IDs of the records. 
- */ - public static Set composeFullRecordIds(String datasetId, Collection recordIds) { - return recordIds.stream().map(recordId -> composeFullRecordId(datasetId, recordId)).collect( - Collectors.toSet()); - } - - /** - * Composes a full record ID. - * - * @param datasetId The ID of the dataset. - * @param recordId The (simple) ID of the record. - * @return The full/qualified ID of the record. - */ - public static String composeFullRecordId(String datasetId, String recordId) { - return "/" + datasetId + "/" + recordId; - } - - /** - * Decomposes a full record ID into the dataset ID and the simple record ID part. - * - * @param fullRecordId The full record ID. - * @return A String pair containing first the dataset ID, and second the simple record ID. - */ - public static Pair decomposeFullRecordId(String fullRecordId) { - final Matcher matcher = FULL_RECORD_ID_PATTERN.matcher(fullRecordId); - if (!matcher.find()) { - return null; - } - return new ImmutablePair<>(matcher.group(1), matcher.group(2)); - } - - /** - * This method checks/validates and normalizes incoming depublished record IDs for persistence. - * - * @param datasetId The dataset ID to which the depublished record belongs. - * @param recordIdsInSeparateLines The unchecked and non-normalized record ID, in a - * newline-separated string. The method accepts and ignores empty lines. - * @return The checked and normalized record IDs. - * @throws BadContentException In case any of the incoming record IDs does not validate. 
- */ - public static Set checkAndNormalizeRecordIds(String datasetId, - String recordIdsInSeparateLines) throws BadContentException { - final String[] recordIds = LINE_SEPARATION_PATTERN.split(recordIdsInSeparateLines); - final Set normalizedRecordIds = HashSet.newHashSet(recordIds.length); - for (String recordId : recordIds) { - checkAndNormalizeRecordId(datasetId, recordId).ifPresent(normalizedRecordIds::add); - } - return normalizedRecordIds; - } - - /** - * This method checks/validates and normalizes an incoming depublished record ID for persistence. - * - * @param datasetId The dataset ID to which the depublished record belongs. - * @param recordId The unchecked and non-normalized record ID. - * @return The checked and normalized record ID. Or empty Optional if the incoming ID is empty. - * @throws BadContentException In case the incoming record ID does not validate. - */ - public static Optional checkAndNormalizeRecordId(String datasetId, String recordId) - throws BadContentException { - - // Trim and check that string is not empty. We allow empty record IDs, we return empty optional. - final String recordIdTrimmed = recordId.trim(); - final Optional result; - if (recordIdTrimmed.isEmpty()) { - result = Optional.empty(); - } else { - result = Optional.of(validateNonEmptyRecordId(datasetId, recordIdTrimmed)); - } - return result; - } - - private static String validateNonEmptyRecordId(String datasetId, String recordIdTrimmed) - throws BadContentException { - - // Check if it is a valid URI. This also checks for spaces. Relative URIs pass this test too. - try { - new URI(recordIdTrimmed); - } catch (URISyntaxException e) { - throw new BadContentException("Invalid record ID (is not a valid URI): " + recordIdTrimmed, - e); - } - - // Split in segments based on the slash - don't discard empty segments at the end. 
- final String[] segments = recordIdTrimmed.split("/", -1); - final String lastSegment = segments[segments.length - 1]; - final String penultimateSegment = segments.length > 1 ? segments[segments.length - 2] : ""; - - // Check last segment: cannot be empty. - if (lastSegment.isEmpty()) { - throw new BadContentException("Invalid record ID (ends with '/'): " + recordIdTrimmed); - } - - // Check last segment: cannot contain invalid characters - if (INVALID_CHAR_IN_RECORD_ID.matcher(lastSegment).find()) { - throw new BadContentException( - "Invalid record ID (contains invalid characters): " + lastSegment); - } - - // Check penultimate segment: if it is empty, it must be because it is the start of the ID. - if (penultimateSegment.isEmpty() && segments.length > 2) { - throw new BadContentException( - "Invalid record ID (dataset ID seems to be missing): " + recordIdTrimmed); - } - - // Check penultimate segment: if it is not empty, it must be equal to the dataset ID. - if (!penultimateSegment.isEmpty() && !penultimateSegment.equals(datasetId)) { - throw new BadContentException( - "Invalid record ID (doesn't seem to belong to the correct dataset): " - + recordIdTrimmed); - } - - // Return the last segment (the record ID without the dataset ID). 
- return lastSegment; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/TransformationParameters.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/TransformationParameters.java deleted file mode 100644 index 7bf576648a..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/common/TransformationParameters.java +++ /dev/null @@ -1,39 +0,0 @@ -package eu.europeana.metis.core.common; - -import eu.europeana.metis.core.dataset.Dataset; -import java.util.Locale; - -/** - * This class is to be used to create the transformation parameters based on a provided {@link - * Dataset}, so that there is a centralized location of how those parameters should be created. - */ -public class TransformationParameters { - - private final String datasetName; - private final String edmCountry; - private final String edmLanguage; - - /** - * Constructor that initializes all final fields. - * - * @param dataset the provided dataset - */ - public TransformationParameters(Dataset dataset) { - //DatasetName in Transformation should be a concatenation datasetId_datasetName - datasetName = dataset.getDatasetId() + "_" + dataset.getDatasetName(); - edmCountry = dataset.getCountry().getName(); - edmLanguage = dataset.getLanguage().name().toLowerCase(Locale.US); - } - - public String getDatasetName() { - return datasetName; - } - - public String getEdmCountry() { - return edmCountry; - } - - public String getEdmLanguage() { - return edmLanguage; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/Dataset.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/Dataset.java deleted file mode 100644 index ff6bc3275a..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/Dataset.java +++ /dev/null @@ -1,258 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import 
com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import dev.morphia.annotations.IndexOptions; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.CountryDeserializer; -import eu.europeana.metis.core.common.CountrySerializer; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.common.LanguageDeserializer; -import eu.europeana.metis.core.common.LanguageSerializer; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import org.bson.types.ObjectId; - -/** - * Dataset model that contains all the required fields for Dataset functionality. - */ -@Entity -@Indexes({ - @Index(fields = {@Field("organizationId"), - @Field("datasetName")}, options = @IndexOptions(unique = true)), - @Index(fields = {@Field("ecloudDatasetId")}, options = @IndexOptions(unique = true)), - @Index(fields = {@Field("datasetId")}), - @Index(fields = {@Field("datasetName")}), - @Index(fields = {@Field("organizationId")}), - @Index(fields = {@Field("organizationName")}), - @Index(fields = {@Field("provider")}), - @Index(fields = {@Field("intermediateProvider")}), - @Index(fields = {@Field("dataProvider")}), - @Index(fields = {@Field("createdByUserId")})}) -public class Dataset implements HasMongoObjectId { - - /** - * Whether a dataset is fit for publication. {@link #PARTIALLY_FIT} means that some records may be - * unfit for publication. 
- */ - public enum PublicationFitness { - FIT, PARTIALLY_FIT, UNFIT - } - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - private String ecloudDatasetId; - private String datasetId; - private String datasetName; - private String organizationId; - private String organizationName; - private String provider; - private String intermediateProvider; - private String dataProvider; - private String createdByUserId; - - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date createdDate; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date updatedDate; - - private List datasetIdsToRedirectFrom = new ArrayList<>(); - private String replacedBy; - private String replaces; - - @JsonSerialize(using = CountrySerializer.class) - @JsonDeserialize(using = CountryDeserializer.class) - private Country country; - @JsonSerialize(using = LanguageSerializer.class) - @JsonDeserialize(using = LanguageDeserializer.class) - private Language language; - - private String description; - private PublicationFitness publicationFitness; - private String notes; - - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId xsltId; - - @Override - public ObjectId getId() { - return id; - } - - @Override - public void setId(ObjectId id) { - this.id = id; - } - - public String getEcloudDatasetId() { - return ecloudDatasetId; - } - - public void setEcloudDatasetId(String ecloudDatasetId) { - this.ecloudDatasetId = ecloudDatasetId; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getDatasetName() { - return datasetName; - } - - public void setDatasetName(String datasetName) { - this.datasetName = datasetName; - } - - public String getOrganizationId() { - return organizationId; - } - - public void setOrganizationId(String organizationId) { - this.organizationId = organizationId; - } - - public String 
getOrganizationName() { - return organizationName; - } - - public void setOrganizationName(String organizationName) { - this.organizationName = organizationName; - } - - public String getProvider() { - return provider; - } - - public void setProvider(String provider) { - this.provider = provider; - } - - public String getIntermediateProvider() { - return intermediateProvider; - } - - public void setIntermediateProvider(String intermediateProvider) { - this.intermediateProvider = intermediateProvider; - } - - public String getDataProvider() { - return dataProvider; - } - - public void setDataProvider(String dataProvider) { - this.dataProvider = dataProvider; - } - - public String getCreatedByUserId() { - return createdByUserId; - } - - public void setCreatedByUserId(String createdByUserId) { - this.createdByUserId = createdByUserId; - } - - public Date getCreatedDate() { - return createdDate == null ? null : new Date(createdDate.getTime()); - } - - public void setCreatedDate(Date createdDate) { - this.createdDate = new Date(createdDate.getTime()); - } - - public Date getUpdatedDate() { - return updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - public void setUpdatedDate(Date updatedDate) { - this.updatedDate = updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - public List getDatasetIdsToRedirectFrom() { - return new ArrayList<>(datasetIdsToRedirectFrom); - } - - public void setDatasetIdsToRedirectFrom(List datasetIdsToRedirectFrom) { - this.datasetIdsToRedirectFrom = - datasetIdsToRedirectFrom == null ? 
new ArrayList<>() : new ArrayList<>( - datasetIdsToRedirectFrom); - } - - public String getReplacedBy() { - return replacedBy; - } - - public void setReplacedBy(String replacedBy) { - this.replacedBy = replacedBy; - } - - public String getReplaces() { - return replaces; - } - - public void setReplaces(String replaces) { - this.replaces = replaces; - } - - public Country getCountry() { - return country; - } - - public void setCountry(Country country) { - this.country = country; - } - - public Language getLanguage() { - return language; - } - - public void setLanguage(Language language) { - this.language = language; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public PublicationFitness getPublicationFitness() { - return publicationFitness; - } - - public void setPublicationFitness(PublicationFitness publicationFitness) { - this.publicationFitness = publicationFitness; - } - - public String getNotes() { - return notes; - } - - public void setNotes(String notes) { - this.notes = notes; - } - - public ObjectId getXsltId() { - return xsltId; - } - - public void setXsltId(ObjectId xsltId) { - this.xsltId = xsltId; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetExecutionInformation.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetExecutionInformation.java deleted file mode 100644 index 1a8f2cef0b..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetExecutionInformation.java +++ /dev/null @@ -1,162 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import com.fasterxml.jackson.annotation.JsonFormat; -import java.util.Date; - -/** - * Contains execution information of a dataset. - *

Such as the last preview, first publish, last publish, last depublish, last harvest - * information.

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-03 - */ -public class DatasetExecutionInformation { - - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date lastPreviewDate; - private int lastPreviewRecords; - private boolean lastPreviewRecordsReadyForViewing; - private int totalPreviewRecords; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date firstPublishedDate; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date lastPublishedDate; - private int lastPublishedRecords; - private boolean lastPublishedRecordsReadyForViewing; - private int totalPublishedRecords; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date lastDepublishedDate; - private int lastDepublishedRecords; - private PublicationStatus publicationStatus; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date lastHarvestedDate; - private int lastHarvestedRecords; - - public DatasetExecutionInformation() { - //Required for json serialization - } - - public Date getLastPreviewDate() { - return lastPreviewDate == null ? null : new Date(lastPreviewDate.getTime()); - } - - public void setLastPreviewDate(Date lastPreviewDate) { - this.lastPreviewDate = lastPreviewDate == null ? 
null : new Date(lastPreviewDate.getTime()); - } - - public int getLastPreviewRecords() { - return lastPreviewRecords; - } - - public void setLastPreviewRecords(int lastPreviewRecords) { - this.lastPreviewRecords = lastPreviewRecords; - } - - public boolean isLastPreviewRecordsReadyForViewing() { - return lastPreviewRecordsReadyForViewing; - } - - public void setLastPreviewRecordsReadyForViewing(boolean lastPreviewRecordsReadyForViewing) { - this.lastPreviewRecordsReadyForViewing = lastPreviewRecordsReadyForViewing; - } - - public int getTotalPreviewRecords() { - return totalPreviewRecords; - } - - public void setTotalPreviewRecords(int totalPreviewRecords) { - this.totalPreviewRecords = totalPreviewRecords; - } - - public Date getFirstPublishedDate() { - return firstPublishedDate == null ? null : new Date(firstPublishedDate.getTime()); - } - - public void setFirstPublishedDate(Date firstPublishedDate) { - this.firstPublishedDate = - firstPublishedDate == null ? null : new Date(firstPublishedDate.getTime()); - } - - public Date getLastPublishedDate() { - return lastPublishedDate == null ? null : new Date(lastPublishedDate.getTime()); - } - - public void setLastPublishedDate(Date lastPublishedDate) { - this.lastPublishedDate = - lastPublishedDate == null ? 
null : new Date(lastPublishedDate.getTime()); - } - - public int getLastPublishedRecords() { - return lastPublishedRecords; - } - - public void setLastPublishedRecords(int lastPublishedRecords) { - this.lastPublishedRecords = lastPublishedRecords; - } - - public boolean isLastPublishedRecordsReadyForViewing() { - return lastPublishedRecordsReadyForViewing; - } - - public void setLastPublishedRecordsReadyForViewing(boolean lastPublishedRecordsReadyForViewing) { - this.lastPublishedRecordsReadyForViewing = lastPublishedRecordsReadyForViewing; - } - - public int getTotalPublishedRecords() { - return totalPublishedRecords; - } - - public void setTotalPublishedRecords(int totalPublishedRecords) { - this.totalPublishedRecords = totalPublishedRecords; - } - - public Date getLastDepublishedDate() { - return lastDepublishedDate == null ? null : new Date(lastDepublishedDate.getTime()); - } - - public void setLastDepublishedDate(Date lastDepublishedDate) { - this.lastDepublishedDate = - lastDepublishedDate == null ? null : new Date(lastDepublishedDate.getTime()); - } - - public int getLastDepublishedRecords() { - return lastDepublishedRecords; - } - - public void setLastDepublishedRecords(int lastDepublishedRecords) { - this.lastDepublishedRecords = lastDepublishedRecords; - } - - public PublicationStatus getPublicationStatus() { - return publicationStatus; - } - - public void setPublicationStatus(PublicationStatus publicationStatus) { - this.publicationStatus = publicationStatus; - } - - public Date getLastHarvestedDate() { - return lastHarvestedDate == null ? null : new Date(lastHarvestedDate.getTime()); - } - - public void setLastHarvestedDate(Date lastHarvestedDate) { - this.lastHarvestedDate = - lastHarvestedDate == null ? 
null : new Date(lastHarvestedDate.getTime()); - } - - public int getLastHarvestedRecords() { - return lastHarvestedRecords; - } - - public void setLastHarvestedRecords(int lastHarvestedRecords) { - this.lastHarvestedRecords = lastHarvestedRecords; - } - - /** - * The status of the dataset with regards to (de)publication. - */ - public enum PublicationStatus { - PUBLISHED, DEPUBLISHED - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetIdSequence.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetIdSequence.java deleted file mode 100644 index 3622238c17..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetIdSequence.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Id; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import org.bson.types.ObjectId; - -/** - * The database structure to hold the dataset identifiers sequence. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-12-27 - */ -@Entity -public class DatasetIdSequence { - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - - private int sequence; - - public DatasetIdSequence() { - //Required for json serialization - } - - /** - * Initialize sequence with provided argument. 
- * - * @param sequence the number to start the sequence from - */ - public DatasetIdSequence(int sequence) { - this.sequence = sequence; - } - - public ObjectId getId() { - return id; - } - - public void setId(ObjectId id) { - this.id = id; - } - - public int getSequence() { - return sequence; - } - - public void setSequence(int sequence) { - this.sequence = sequence; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetSearchView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetSearchView.java deleted file mode 100644 index 243506ff8d..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetSearchView.java +++ /dev/null @@ -1,65 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import com.fasterxml.jackson.annotation.JsonFormat; -import java.util.Date; - -/** - * Dataset search model that contains all the required fields for Dataset Search functionality. 
- * - * @author Srishti Singh (srishti.singh@europeana.eu) - * @since 2019-11-12 - */ -public class DatasetSearchView { - - private String datasetId; - private String datasetName; - private String provider; - private String dataProvider; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date lastExecutionDate; - - public DatasetSearchView() { - //Required for json (de)serialization - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getDatasetName() { - return datasetName; - } - - public void setDatasetName(String datasetName) { - this.datasetName = datasetName; - } - - public String getProvider() { - return provider; - } - - public void setProvider(String provider) { - this.provider = provider; - } - - public String getDataProvider() { - return dataProvider; - } - - public void setDataProvider(String dataProvider) { - this.dataProvider = dataProvider; - } - - public Date getLastExecutionDate() { - return lastExecutionDate == null ? null : new Date(lastExecutionDate.getTime()); - } - - public void setLastExecutionDate(Date lastExecutionDate) { - this.lastExecutionDate = - lastExecutionDate == null ? 
null : new Date(lastExecutionDate.getTime()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXslt.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXslt.java deleted file mode 100644 index edec979378..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXslt.java +++ /dev/null @@ -1,99 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import java.util.Date; -import org.bson.types.ObjectId; - -/** - * A wrapper class with metadata about an xslt and the xslt as a string field. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-27 - */ -//@formatter:off -@Entity -@Indexes({ - @Index(fields = {@Field("datasetId")}), - @Index(fields = {@Field("createdDate")}), - @Index(fields = {@Field("datasetId"), @Field("createdDate")}) -}) -//@formatter:on -public class DatasetXslt { - - public static final String DEFAULT_DATASET_ID = "-1"; - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - - private String datasetId; - private String xslt; - @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX") - private Date createdDate; - - public DatasetXslt() { - //Required for json serialization - } - - /** - * Constructor with required parameters for a dataset-specific XSLT. When created it assigns the - * current date to it. 
- * - * @param datasetId the datasetId that this class is related to - * @param xslt the raw xslt - */ - public DatasetXslt(String datasetId, String xslt) { - this.datasetId = datasetId; - this.xslt = xslt; - this.createdDate = new Date(); - } - - /** - * Constructor with required parameters for a default XSLT. When created it assigns the current - * date to it. - * - * @param xslt the raw xslt - */ - public DatasetXslt(String xslt) { - this(DEFAULT_DATASET_ID, xslt); - } - - public ObjectId getId() { - return id; - } - - public void setId(ObjectId id) { - this.id = id; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getXslt() { - return xslt; - } - - public void setXslt(String xslt) { - this.xslt = xslt; - } - - public Date getCreatedDate() { - return createdDate == null ? null : new Date(createdDate.getTime()); - } - - public void setCreatedDate(Date createdDate) { - this.createdDate = createdDate == null ? null : new Date(createdDate.getTime()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXsltStringWrapper.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXsltStringWrapper.java deleted file mode 100644 index 41bac4b043..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DatasetXsltStringWrapper.java +++ /dev/null @@ -1,44 +0,0 @@ -package eu.europeana.metis.core.dataset; - -/** - * Used to send over HTTP the dataset with it's corresponding xslt. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-28 - */ -public class DatasetXsltStringWrapper { - - private Dataset dataset; - private String xslt; - - public DatasetXsltStringWrapper() { - //Required for json serialization - } - - /** - * Constructor with all the required paramets - * - * @param dataset {@link Dataset} - * @param xslt the String representation of the xslt text - */ - public DatasetXsltStringWrapper(Dataset dataset, String xslt) { - this.dataset = dataset; - this.xslt = xslt; - } - - public Dataset getDataset() { - return dataset; - } - - public void setDataset(Dataset dataset) { - this.dataset = dataset; - } - - public String getXslt() { - return xslt; - } - - public void setXslt(String xslt) { - this.xslt = xslt; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DepublishRecordId.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DepublishRecordId.java deleted file mode 100644 index b381dec36c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/dataset/DepublishRecordId.java +++ /dev/null @@ -1,103 +0,0 @@ -package eu.europeana.metis.core.dataset; - -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import dev.morphia.annotations.IndexOptions; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import java.time.Instant; -import org.bson.types.ObjectId; - -/** - * A database model entity representing a depublished (or to-be-depublished) record belonging to a - * dataset. The record is identified by an ID and can have a state and date of depublication. 
- */ -@Entity -@Indexes({ - @Index(fields = {@Field(DepublishRecordId.DATASET_ID_FIELD), - @Field(DepublishRecordId.RECORD_ID_FIELD)}, options = @IndexOptions(unique = true)), - @Index(fields = {@Field(DepublishRecordId.DATASET_ID_FIELD)}), - @Index(fields = {@Field(DepublishRecordId.RECORD_ID_FIELD)})}) -public class DepublishRecordId implements HasMongoObjectId { - - public static final String ID_FIELD = "_id"; - public static final String DATASET_ID_FIELD = "datasetId"; - public static final String RECORD_ID_FIELD = "recordId"; - public static final String DEPUBLICATION_STATUS_FIELD = "depublicationStatus"; - public static final String DEPUBLICATION_DATE_FIELD = "depublicationDate"; - - /** - * The status of the record with regard to (de)publication. - */ - public enum DepublicationStatus {DEPUBLISHED, PENDING_DEPUBLICATION} - - /** - * The ID of the data object. - **/ - @Id - private ObjectId id; - - /** - * The dataset ID. - **/ - private String datasetId; - - /** - * The record ID (without dataset prefix). - **/ - private String recordId; - - /** - * The state of the record's depublication. - **/ - private DepublicationStatus depublicationStatus; - - /** - * The date of depublication. 
- **/ - private Instant depublicationDate; - - @Override - public void setId(ObjectId id) { - this.id = id; - } - - @Override - public ObjectId getId() { - return id; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getRecordId() { - return recordId; - } - - public void setRecordId(String recordId) { - this.recordId = recordId; - } - - public DepublicationStatus getDepublicationStatus() { - return depublicationStatus; - } - - public void setDepublicationStatus(DepublicationStatus depublicationStatus) { - this.depublicationStatus = depublicationStatus; - } - - public Instant getDepublicationDate() { - return depublicationDate; - } - - public void setDepublicationDate(Instant depublicationDate) { - this.depublicationDate = depublicationDate; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/DatasetAlreadyExistsException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/DatasetAlreadyExistsException.java deleted file mode 100644 index 17d31fae88..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/DatasetAlreadyExistsException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a dataset already exists in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-17 - */ -@ResponseStatus(value = HttpStatus.CONFLICT, reason = "Dataset already exists") -public class DatasetAlreadyExistsException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. 
- * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. - */ - public DatasetAlreadyExistsException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/InvalidIndexPluginException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/InvalidIndexPluginException.java deleted file mode 100644 index 5caf654753..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/InvalidIndexPluginException.java +++ /dev/null @@ -1,24 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if index plugin is invalid - */ -@ResponseStatus(value = HttpStatus.NOT_ACCEPTABLE, reason = "Invalid index plugin") -public class InvalidIndexPluginException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public InvalidIndexPluginException(String message) { - super(message); - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoDatasetFoundException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoDatasetFoundException.java deleted file mode 100644 index 865d5a252c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoDatasetFoundException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a dataset does not exist in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-17 - */ -@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "No dataset found") -public class NoDatasetFoundException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public NoDatasetFoundException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoScheduledWorkflowFoundException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoScheduledWorkflowFoundException.java deleted file mode 100644 index 42b5638ea8..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoScheduledWorkflowFoundException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used when a scheduled workflow does not exist in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-26 - */ -@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "No userWorkflow found") -public class NoScheduledWorkflowFoundException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public NoScheduledWorkflowFoundException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowExecutionFoundException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowExecutionFoundException.java deleted file mode 100644 index 85a2d88bda..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowExecutionFoundException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a workflow execution does not exist in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-31 - */ -@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "No userWorkflowExecution found") -public class NoWorkflowExecutionFoundException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public NoWorkflowExecutionFoundException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowFoundException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowFoundException.java deleted file mode 100644 index 5876e6c3d0..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoWorkflowFoundException.java +++ /dev/null @@ -1,28 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exceptin used if a workflow does not exist in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "No userWorkflow found") -public class NoWorkflowFoundException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public NoWorkflowFoundException(String message) { - super(message); - } -} - - diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoXsltFoundException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoXsltFoundException.java deleted file mode 100644 index d1c0068f44..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/NoXsltFoundException.java +++ /dev/null @@ -1,28 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if an xslt does not exist in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-28 - */ -@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "No xslt found") -public class NoXsltFoundException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public NoXsltFoundException(String message) { - super(message); - } - - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/PluginExecutionNotAllowed.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/PluginExecutionNotAllowed.java deleted file mode 100644 index 4d469e17ef..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/PluginExecutionNotAllowed.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a plugin execution is not allowed. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-30 - */ -@ResponseStatus(value = HttpStatus.NOT_ACCEPTABLE, reason = "Plugin Execution Not Allowed") -public class PluginExecutionNotAllowed extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public PluginExecutionNotAllowed(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/ScheduledWorkflowAlreadyExistsException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/ScheduledWorkflowAlreadyExistsException.java deleted file mode 100644 index 08136e12a4..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/ScheduledWorkflowAlreadyExistsException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a scheduled workflow already exists in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-25 - */ -@ResponseStatus(value = HttpStatus.CONFLICT, reason = "ScheduledWorkflow already exists") -public class ScheduledWorkflowAlreadyExistsException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public ScheduledWorkflowAlreadyExistsException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowAlreadyExistsException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowAlreadyExistsException.java deleted file mode 100644 index 7e1050831d..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowAlreadyExistsException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exceptions used if a workflow already exists in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-06-02 - */ -@ResponseStatus(value = HttpStatus.CONFLICT, reason = "User workflow execution already exists") -public class WorkflowAlreadyExistsException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public WorkflowAlreadyExistsException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowExecutionAlreadyExistsException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowExecutionAlreadyExistsException.java deleted file mode 100644 index 734e259a2e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/WorkflowExecutionAlreadyExistsException.java +++ /dev/null @@ -1,26 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import eu.europeana.metis.exception.GenericMetisException; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * Exception used if a workflow execution already exists in the database. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -@ResponseStatus(value = HttpStatus.CONFLICT, reason = "User workflow execution already exists") -public class WorkflowExecutionAlreadyExistsException extends GenericMetisException { - - private static final long serialVersionUID = -3332292346834265371L; - - /** - * Constructs a new exception with the specified detail message. - * - * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. 
- */ - public WorkflowExecutionAlreadyExistsException(String message) { - super(message); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/XsltSetupException.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/XsltSetupException.java deleted file mode 100644 index d43eee70d5..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/exceptions/XsltSetupException.java +++ /dev/null @@ -1,25 +0,0 @@ -package eu.europeana.metis.core.exceptions; - -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.ResponseStatus; -import eu.europeana.metis.exception.GenericMetisException; - -/** - * Exception used if an XSLT exists in the database but it could not be retrieved or parsed. - */ -@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR, reason = "Bad xslt found.") -public class XsltSetupException extends GenericMetisException { - - /** This is an instance of {@link java.io.Serializable}. **/ - private static final long serialVersionUID = 3604852827523793668L; - - /** - * Constructor. - * - * @param message The message. - * @param cause The cause. 
- */ - public XsltSetupException(String message, Exception cause) { - super(message, cause); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublicationInfoView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublicationInfoView.java deleted file mode 100644 index daac6c75f5..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublicationInfoView.java +++ /dev/null @@ -1,22 +0,0 @@ -package eu.europeana.metis.core.rest; - -public class DepublicationInfoView { - - private final ResponseListWrapper depublicationRecordIds; - private final boolean depublicationTriggerable; - - public DepublicationInfoView( - ResponseListWrapper depublicationRecordIds, - boolean depublicationTriggerable) { - this.depublicationRecordIds = depublicationRecordIds; - this.depublicationTriggerable = depublicationTriggerable; - } - - public ResponseListWrapper getDepublicationRecordIds() { - return depublicationRecordIds; - } - - public boolean isDepublicationTriggerable() { - return depublicationTriggerable; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublishRecordIdView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublishRecordIdView.java deleted file mode 100644 index 29d468725b..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/DepublishRecordIdView.java +++ /dev/null @@ -1,60 +0,0 @@ -package eu.europeana.metis.core.rest; - -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import eu.europeana.metis.core.common.JavaTimeSerialization.IsoInstantSerializer; -import eu.europeana.metis.core.dataset.DepublishRecordId; -import java.time.Instant; - -/** - * An immutable view on the depublish record id. 
- */ -public class DepublishRecordIdView { - - private final String recordId; - private final DepublicationStatus depublicationStatus; - - @JsonSerialize(using = IsoInstantSerializer.class) - private final Instant depublicationDate; - - /** - * Constructor. - * @param record The record to create this view for. - */ - public DepublishRecordIdView(DepublishRecordId record) { - this.recordId = record.getRecordId(); - this.depublicationDate = record.getDepublicationDate(); - this.depublicationStatus = DepublicationStatus - .convertFromModelToView(record.getDepublicationStatus()); - } - - public String getRecordId() { - return recordId; - } - - public DepublicationStatus getDepublicationStatus() { - return depublicationStatus; - } - - public Instant getDepublicationDate() { - return depublicationDate; - } - - /** - * The status of this record with regards to (de)publication. - */ - public enum DepublicationStatus { - DEPUBLISHED, PENDING; - - private static DepublicationStatus convertFromModelToView( - DepublishRecordId.DepublicationStatus depublicationStatus) { - DepublicationStatus depublicationStatusView = null; - if (depublicationStatus != null) { - switch (depublicationStatus) { - case DEPUBLISHED -> depublicationStatusView = DepublicationStatus.DEPUBLISHED; - default -> depublicationStatusView = DepublicationStatus.PENDING; - } - } - return depublicationStatusView; - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ExecutionHistory.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ExecutionHistory.java deleted file mode 100644 index ebb4e96c7e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ExecutionHistory.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.rest; - -import com.fasterxml.jackson.annotation.JsonFormat; -import eu.europeana.metis.utils.CommonStringValues; -import java.util.ArrayList; -import java.util.Collection; 
-import java.util.Collections; -import java.util.Date; -import java.util.List; - -/** - * This class represents the entire execution history for a dataset. - */ -public class ExecutionHistory { - - private List executions; - - public List getExecutions() { - return Collections.unmodifiableList(executions); - } - - public void setExecutions(Collection executions) { - this.executions = new ArrayList<>(executions); - } - - /** - * This class represents one workflow execution. - */ - public static class Execution { - - private String workflowExecutionId; - - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date startedDate; - - public String getWorkflowExecutionId() { - return workflowExecutionId; - } - - public Date getStartedDate() { - return new Date(startedDate.getTime()); - } - - public void setWorkflowExecutionId(String workflowExecutionId) { - this.workflowExecutionId = workflowExecutionId; - } - - public void setStartedDate(Date startedDate) { - this.startedDate = new Date(startedDate.getTime()); - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/IncrementalHarvestingAllowedView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/IncrementalHarvestingAllowedView.java deleted file mode 100644 index d91db4ac43..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/IncrementalHarvestingAllowedView.java +++ /dev/null @@ -1,17 +0,0 @@ -package eu.europeana.metis.core.rest; - -/** - * An object wrapping a boolean indicating whether incremental harvesting is allowed. 
- */ -public class IncrementalHarvestingAllowedView { - - private final boolean incrementalHarvestingAllowed; - - public IncrementalHarvestingAllowedView(boolean incrementalHarvestingAllowed) { - this.incrementalHarvestingAllowed = incrementalHarvestingAllowed; - } - - public boolean isIncrementalHarvestingAllowed() { - return incrementalHarvestingAllowed; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ListOfIds.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ListOfIds.java deleted file mode 100644 index f50a73b3e3..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ListOfIds.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.rest; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * Class that encapsulates a list of {@link String} ID objects. - */ -public class ListOfIds { - - private List ids; - - public List getIds() { - return ids == null ? Collections.emptyList() : new ArrayList<>(ids); - } - - public void setIds(List ids) { - this.ids = new ArrayList<>(ids); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PaginatedRecordsResponse.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PaginatedRecordsResponse.java deleted file mode 100644 index f33f9b2e35..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PaginatedRecordsResponse.java +++ /dev/null @@ -1,31 +0,0 @@ -package eu.europeana.metis.core.rest; - -import java.util.List; - -/** - * Class that encapsulates a list of {@link Record} including a {@link #nextPage} field. - */ -public class PaginatedRecordsResponse extends RecordsResponse { - - private String nextPage; - - /** - * Constructor with the required parameters. 
- * - * @param records the list of {@link Record} - * @param nextPage the String representation of the nextPage which is retrieved from a previous - * response - */ - public PaginatedRecordsResponse(List records, String nextPage) { - super(records); - this.nextPage = nextPage; - } - - public String getNextPage() { - return nextPage; - } - - public void setNextPage(String nextPage) { - this.nextPage = nextPage; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PluginsWithDataAvailability.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PluginsWithDataAvailability.java deleted file mode 100644 index 98e87f6542..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/PluginsWithDataAvailability.java +++ /dev/null @@ -1,49 +0,0 @@ -package eu.europeana.metis.core.rest; - -import eu.europeana.metis.core.workflow.plugins.PluginType; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * This class represents a list of plugins with an indication for each on whether there is - * successfully processed data. - */ -public class PluginsWithDataAvailability { - - private List plugins; - - public void setPlugins(List plugins) { - this.plugins = new ArrayList<>(plugins); - } - - public List getPlugins() { - return Collections.unmodifiableList(plugins); - } - - /** - * This class represents a plugin with an indication on whether there is successfully processed - * data. 
- */ - public static class PluginWithDataAvailability { - - private PluginType pluginType; - private boolean canDisplayRawXml; - - public void setPluginType(PluginType pluginType) { - this.pluginType = pluginType; - } - - public PluginType getPluginType() { - return pluginType; - } - - public void setCanDisplayRawXml(boolean canDisplayRawXml) { - this.canDisplayRawXml = canDisplayRawXml; - } - - public boolean isCanDisplayRawXml() { - return canDisplayRawXml; - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/Record.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/Record.java deleted file mode 100644 index aa059b05a7..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/Record.java +++ /dev/null @@ -1,45 +0,0 @@ -package eu.europeana.metis.core.rest; - -/** - * Model class that encapsulates the ecloud identifier and the xml contents of a particular state of - * that record, which can be different on each use. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-23 - */ -public class Record { - - private String ecloudId; - private String xmlRecord; - - public Record() { - //Required for json serialization - } - - /** - * Constructor with the required arguments - * - * @param ecloudId the ecloudId representing the record - * @param xmlRecord the text representing the xml record - */ - public Record(String ecloudId, String xmlRecord) { - this.ecloudId = ecloudId; - this.xmlRecord = xmlRecord; - } - - public String getEcloudId() { - return ecloudId; - } - - public void setEcloudId(String ecloudId) { - this.ecloudId = ecloudId; - } - - public String getXmlRecord() { - return xmlRecord; - } - - public void setXmlRecord(String xmlRecord) { - this.xmlRecord = xmlRecord; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RecordsResponse.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RecordsResponse.java deleted file mode 100644 index 0c54b48932..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RecordsResponse.java +++ /dev/null @@ -1,32 +0,0 @@ -package eu.europeana.metis.core.rest; - -import java.util.ArrayList; -import java.util.List; - -/** - * Class that encapsulates a list of {@link Record} objects. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-26 - */ -public class RecordsResponse { - - private List records; - - /** - * Constructor with the required parameters. 
- * - * @param records the list of {@link Record} - */ - public RecordsResponse(List records) { - this.records = new ArrayList<>(records); - } - - public List getRecords() { - return new ArrayList<>(records); - } - - public void setRecords(List records) { - this.records = new ArrayList<>(records); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RequestLimits.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RequestLimits.java deleted file mode 100644 index c60ac3ae91..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/RequestLimits.java +++ /dev/null @@ -1,25 +0,0 @@ -package eu.europeana.metis.core.rest; - -/** - * Contains default values for limits per specific endpoint requests. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-16 - */ -public enum RequestLimits { - - DATASETS_PER_REQUEST(10), - WORKFLOW_EXECUTIONS_PER_REQUEST(10), - SCHEDULED_EXECUTIONS_PER_REQUEST(10), - DEPUBLISHED_RECORDS_PER_REQUEST(20); - - private final int limit; - - RequestLimits(int limit) { - this.limit = limit; - } - - public int getLimit() { - return limit; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ResponseListWrapper.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ResponseListWrapper.java deleted file mode 100644 index 5109e47e66..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/ResponseListWrapper.java +++ /dev/null @@ -1,139 +0,0 @@ -package eu.europeana.metis.core.rest; - -import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper; -import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; - - -/** - * Class used to wrap a list of result object to be given back as a response on a REST API - * endpoint. 
- * - * @param the type of objects to be wrapped - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-06-01 - */ - -public class ResponseListWrapper { - - @JacksonXmlElementWrapper(localName = "Results") - @JacksonXmlProperty(localName = "Result") - private List results; - private int listSize; - private int nextPage; - private Boolean maxResultCountReached; - - /** - * Accepts a list of results objects, and based on the resultsPerRequestLimit it will determine if - * there would be another nextPage. This method assumes a page count of 1. - * - * @param results the {@link List} of objects - * @param resultsPerRequestLimit the imposed result limit per request (i.e. page length) - * @param page the non-negative requested page number - */ - public void setResultsAndLastPage(List results, int resultsPerRequestLimit, int page) { - this.setResultsAndLastPage(results, resultsPerRequestLimit, page, null); - } - - /** - * Accepts a list of results objects, and based on the resultsPerRequestLimit it will determine if - * there would be another nextPage. This method assumes a page count of 1. - * - * @param results the {@link List} of objects - * @param resultsPerRequestLimit the imposed result limit per request (i.e. page length) - * @param page the non-negative requested page number - * @param maxResultCountReached whether the maximum result count is reached (the number of - * results, regardless of pagination, the server is willing to serve). Can be null if this - * is not applicable. - */ - public void setResultsAndLastPage(List results, int resultsPerRequestLimit, int page, - Boolean maxResultCountReached) { - setResultsAndLastPage(results, resultsPerRequestLimit, page, 1, maxResultCountReached); - } - - /** - * Accepts a list of results objects, and based on the resultsPerRequestLimit it will determine if - * there would be another nextPage. 
- * - * @param results the {@link List} of objects - * @param resultsPerRequestLimit the imposed result limit per request (i.e. page length) - * @param page the non-negative requested page number - * @param pageCount the number of pages that were requested. - */ - public void setResultsAndLastPage(List results, int resultsPerRequestLimit, int page, - int pageCount) { - this.setResultsAndLastPage(results, resultsPerRequestLimit, page, pageCount, null); - } - - /** - * Accepts a list of results objects, and based on the resultsPerRequestLimit it will determine if - * there would be another nextPage. - * - * @param results the {@link List} of objects - * @param resultsPerRequestLimit the imposed result limit per request (i.e. page length) - * @param page the non-negative requested page number - * @param pageCount the number of pages that were requested. - * @param maxResultCountReached whether the maximum result count is reached (the number of - * results, regardless of pagination, the server is willing to serve). Can be null if this - * is not applicable. - */ - public void setResultsAndLastPage(List results, int resultsPerRequestLimit, int page, - int pageCount, Boolean maxResultCountReached) { - if (results == null || results.isEmpty() || Boolean.TRUE.equals(maxResultCountReached)) { - this.nextPage = -1; - } else if (results.size() < resultsPerRequestLimit * pageCount) { - this.nextPage = -1; - } else { - this.nextPage = page + pageCount; - } - this.listSize = results == null ? 0 : results.size(); - this.maxResultCountReached = maxResultCountReached; - setResults(results); - } - - /** - * Clear the internal data. Used to not recreate the same structure all the time if it's used in a - * loop. 
- */ - public void clear() { - if (results != null) { - results.clear(); - } - listSize = 0; - nextPage = 0; - } - - public List getResults() { - return Optional.ofNullable(results).map(ArrayList::new).orElse(null); - } - - public void setResults(List results) { - this.results = Optional.ofNullable(results).map(ArrayList::new).orElse(null); - } - - public int getNextPage() { - return nextPage; - } - - public void setNextPage(int nextPage) { - this.nextPage = nextPage; - } - - public int getListSize() { - return listSize; - } - - public void setListSize(int listSize) { - this.listSize = listSize; - } - - public Boolean getMaxResultCountReached() { - return maxResultCountReached; - } - - public void setMaxResultCountReached(Boolean maxResultCountReached) { - this.maxResultCountReached = maxResultCountReached; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/VersionEvolution.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/VersionEvolution.java deleted file mode 100644 index bb57b54f31..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/VersionEvolution.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.europeana.metis.core.rest; - -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -/** - * This class represents a history of operations that are applied to a dataset. - */ -public class VersionEvolution { - - private List evolutionSteps; - - public List getEvolutionSteps() { - return Collections.unmodifiableList(evolutionSteps); - } - - public void setEvolutionSteps(Collection versions) { - this.evolutionSteps = new ArrayList<>(versions); - } - - /** - * This class represents one operation applied to a dataset. 
- */ - public static class VersionEvolutionStep { - - private String workflowExecutionId; - private ExecutablePluginType pluginType; - private Date finishedTime; - - public String getWorkflowExecutionId() { - return workflowExecutionId; - } - - public void setWorkflowExecutionId(String workflowExecutionId) { - this.workflowExecutionId = workflowExecutionId; - } - - public ExecutablePluginType getPluginType() { - return pluginType; - } - - public void setPluginType(ExecutablePluginType pluginType) { - this.pluginType = pluginType; - } - - public Date getFinishedTime() { - return new Date(finishedTime.getTime()); - } - - public void setFinishedTime(Date finishedTime) { - this.finishedTime = new Date(finishedTime.getTime()); - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginProgressView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginProgressView.java deleted file mode 100644 index 6ad698a510..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginProgressView.java +++ /dev/null @@ -1,62 +0,0 @@ -package eu.europeana.metis.core.rest.execution.details; - -import eu.europeana.cloud.common.model.dps.TaskState; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; - -/** - * This class contains executionProgress information on a plugin's execution. 
- */ -public class PluginProgressView { - - private final int expectedRecords; - private final int processedRecords; - private final int ignoredRecords; - private final int deletedRecords; - private final int progressPercentage; - private final int errors; - private final TaskState status; - private int totalDatabaseRecords; - - PluginProgressView(ExecutionProgress progress) { - this.expectedRecords = progress.getExpectedRecords(); - this.processedRecords = progress.getProcessedRecords(); - this.ignoredRecords = progress.getIgnoredRecords(); - this.deletedRecords = progress.getDeletedRecords(); - this.errors = progress.getErrors(); - this.progressPercentage = progress.getProgressPercentage(); - this.status = progress.getStatus(); - this.totalDatabaseRecords = progress.getTotalDatabaseRecords(); - } - - public int getExpectedRecords() { - return expectedRecords; - } - - public int getProcessedRecords() { - return processedRecords; - } - - public int getIgnoredRecords() { - return ignoredRecords; - } - - public int getDeletedRecords() { - return deletedRecords; - } - - public int getProgressPercentage() { - return progressPercentage; - } - - public int getErrors() { - return errors; - } - - public TaskState getStatus() { - return status; - } - - public int getTotalDatabaseRecords() { - return totalDatabaseRecords; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginView.java deleted file mode 100644 index 0d666ba64e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/PluginView.java +++ /dev/null @@ -1,110 +0,0 @@ -package eu.europeana.metis.core.rest.execution.details; - -import com.fasterxml.jackson.annotation.JsonFormat; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import 
eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.MetisPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.utils.CommonStringValues; -import java.util.Date; - -/** - * This class represents the complete information on a plugin execution needed for the execution history. - */ -public class PluginView { - - private final PluginType pluginType; - private final String id; - private final PluginStatus pluginStatus; - private final DataStatus dataStatus; - private final String failMessage; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date startedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date updatedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date finishedDate; - private final String externalTaskId; - private final PluginProgressView executionProgress; - private final String topologyName; - private final boolean canDisplayRawXml; - private final MetisPluginMetadata pluginMetadata; - - PluginView(AbstractMetisPlugin plugin, boolean canDisplayRawXml) { - this.pluginType = plugin.getPluginType(); - this.id = plugin.getId(); - this.pluginStatus = plugin.getPluginStatus(); - this.dataStatus = plugin.getDataStatus(); - this.failMessage = plugin.getFailMessage(); - this.startedDate = plugin.getStartedDate(); - this.finishedDate = plugin.getFinishedDate(); - this.canDisplayRawXml = canDisplayRawXml; - if (plugin instanceof AbstractExecutablePlugin) { - this.updatedDate = ((AbstractExecutablePlugin) plugin).getUpdatedDate(); - this.externalTaskId = ((AbstractExecutablePlugin) plugin).getExternalTaskId(); - this.executionProgress = new PluginProgressView(((AbstractExecutablePlugin) plugin).getExecutionProgress()); - this.topologyName = 
((AbstractExecutablePlugin) plugin).getTopologyName(); - this.pluginMetadata = ((AbstractExecutablePlugin) plugin).getPluginMetadata(); - } else { - this.updatedDate = null; - this.externalTaskId = null; - this.executionProgress = null; - this.topologyName = null; - this.pluginMetadata = null; - } - } - - public PluginType getPluginType() { - return pluginType; - } - - public String getId() { - return id; - } - - public PluginStatus getPluginStatus() { - return pluginStatus; - } - - public DataStatus getDataStatus() { - return dataStatus; - } - - public String getFailMessage() { - return failMessage; - } - - public Date getStartedDate() { - return startedDate != null ? new Date(startedDate.getTime()) : null; - } - - public Date getUpdatedDate() { - return updatedDate != null ? new Date(updatedDate.getTime()) : null; - } - - public Date getFinishedDate() { - return finishedDate != null ? new Date(finishedDate.getTime()) : null; - } - - public String getExternalTaskId() { - return externalTaskId; - } - - public PluginProgressView getExecutionProgress() { - return executionProgress; - } - - public String getTopologyName() { - return topologyName; - } - - public boolean isCanDisplayRawXml() { - return canDisplayRawXml; - } - - public MetisPluginMetadata getPluginMetadata() { - return pluginMetadata; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/WorkflowExecutionView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/WorkflowExecutionView.java deleted file mode 100644 index 157634dcbb..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/details/WorkflowExecutionView.java +++ /dev/null @@ -1,120 +0,0 @@ -package eu.europeana.metis.core.rest.execution.details; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.utils.CommonStringValues; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.function.Predicate; - -/** - * This class represents the full information on a workflow execution needed for the execution - * history. - */ -public class WorkflowExecutionView { - - private final String id; - private final String datasetId; - private final WorkflowStatus workflowStatus; - private final String ecloudDatasetId; - private final String cancelledBy; - private final String startedBy; - private final int workflowPriority; - private final boolean cancelling; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date createdDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date startedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date updatedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private final Date finishedDate; - private final boolean isIncremental; - private final List metisPlugins; - - /** - * Constructor. - * @param execution The execution for which to construct this view. - * @param isIncremental Defines if a workflow execution is an incremental one. - * @param canDisplayRawXml A predicate that can decide whether a plugin has results to display. 
- */ - public WorkflowExecutionView(WorkflowExecution execution, boolean isIncremental, Predicate> canDisplayRawXml) { - this.id = execution.getId().toString(); - this.datasetId = execution.getDatasetId(); - this.workflowStatus = execution.getWorkflowStatus(); - this.ecloudDatasetId = execution.getEcloudDatasetId(); - this.cancelledBy = execution.getCancelledBy(); - this.startedBy = execution.getStartedBy(); - this.workflowPriority = execution.getWorkflowPriority(); - this.cancelling = execution.isCancelling(); - this.createdDate = execution.getCreatedDate(); - this.startedDate = execution.getStartedDate(); - this.updatedDate = execution.getUpdatedDate(); - this.finishedDate = execution.getFinishedDate(); - this.isIncremental = isIncremental; - this.metisPlugins = execution.getMetisPlugins().stream() - .map(plugin -> new PluginView(plugin, canDisplayRawXml.test(plugin))) - .toList(); - } - - public String getId() { - return id; - } - - public String getDatasetId() { - return datasetId; - } - - public WorkflowStatus getWorkflowStatus() { - return workflowStatus; - } - - public String getEcloudDatasetId() { - return ecloudDatasetId; - } - - public String getCancelledBy() { - return cancelledBy; - } - - public String getStartedBy() { - return startedBy; - } - - public int getWorkflowPriority() { - return workflowPriority; - } - - public boolean isCancelling() { - return cancelling; - } - - public Date getCreatedDate() { - return createdDate != null ? new Date(createdDate.getTime()) : null; - } - - public Date getStartedDate() { - return startedDate != null ? new Date(startedDate.getTime()) : null; - } - - public Date getUpdatedDate() { - return updatedDate != null ? new Date(updatedDate.getTime()) : null; - } - - public Date getFinishedDate() { - return finishedDate != null ? 
new Date(finishedDate.getTime()) : null; - } - - @JsonProperty("isIncremental") - public boolean isIncremental() { - return isIncremental; - } - - public List getMetisPlugins() { - return metisPlugins != null ? new ArrayList<>(metisPlugins) : null; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/DatasetSummaryView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/DatasetSummaryView.java deleted file mode 100644 index 75b3007ae7..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/DatasetSummaryView.java +++ /dev/null @@ -1,34 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import eu.europeana.metis.core.dataset.Dataset; - -/** - * This class represents the vital information on a dataset needed for the execution overview. - */ -public class DatasetSummaryView { - - private String id; - private String datasetId; - private String datasetName; - - DatasetSummaryView() { - } - - DatasetSummaryView(Dataset dataset) { - this.id = dataset.getId().toString(); - this.datasetId = dataset.getDatasetId(); - this.datasetName = dataset.getDatasetName(); - } - - public String getId() { - return id; - } - - public String getDatasetId() { - return datasetId; - } - - public String getDatasetName() { - return datasetName; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionAndDatasetView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionAndDatasetView.java deleted file mode 100644 index eb1bb735cc..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionAndDatasetView.java +++ /dev/null @@ -1,60 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import com.fasterxml.jackson.annotation.JsonIgnore; 
-import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import org.bson.types.ObjectId; - -/** - * This object contains an instance of {@link WorkflowExecution}, but paired to it the execution's - * {@link Dataset}. It implements {@link HasMongoObjectId} so that it can be a type for {@link - * ResponseListWrapper}. - */ -public class ExecutionAndDatasetView implements HasMongoObjectId { - - private ExecutionSummaryView execution; - private DatasetSummaryView dataset; - private ExecutionProgressView executionProgress; - - ExecutionAndDatasetView() { - } - - /** - * Constructor. - * - * @param execution The exection. - * @param dataset The dataset that matches the execution (i.e. {@link Dataset#getId()} matches - * {@link WorkflowExecution#getDatasetId()}). - */ - public ExecutionAndDatasetView(WorkflowExecution execution, Dataset dataset) { - this.execution = new ExecutionSummaryView(execution); - this.dataset = new DatasetSummaryView(dataset); - this.executionProgress = new ExecutionProgressView(execution); - } - - public ExecutionSummaryView getExecution() { - return execution; - } - - public DatasetSummaryView getDataset() { - return dataset; - } - - public ExecutionProgressView getExecutionProgress() { - return executionProgress; - } - - @Override - @JsonIgnore - public ObjectId getId() { - return new ObjectId(execution.getId()); - } - - @Override - @JsonIgnore - public void setId(ObjectId id) { - execution.setId(id.toString()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionProgressView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionProgressView.java deleted file mode 100644 index 78a5c2b6cd..0000000000 --- 
a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionProgressView.java +++ /dev/null @@ -1,55 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * This object contains all information regarding the workflow execution's progress. - */ -public class ExecutionProgressView { - - private static final Set EXECUTING_STATUS_SET = Stream - .of(PluginStatus.RUNNING, PluginStatus.CLEANING, PluginStatus.PENDING, - PluginStatus.IDENTIFYING_DELETED_RECORDS).collect(Collectors.toSet()); - private static final Set FINISHED_STATUS_SET = Stream - .of(PluginStatus.FINISHED, PluginStatus.FAILED, PluginStatus.CANCELLED) - .collect(Collectors.toSet()); - - private int stepsDone; - private int stepsTotal; - private PluginProgressView currentPluginProgress; - - ExecutionProgressView() { - } - - ExecutionProgressView(WorkflowExecution execution) { - List metisPlugins = execution.getMetisPlugins(); - this.stepsDone = (int) metisPlugins.stream() - .map(AbstractMetisPlugin::getPluginStatus).filter(FINISHED_STATUS_SET::contains).count(); - final AbstractMetisPlugin currentPlugin = metisPlugins.stream() - .filter(plugin -> EXECUTING_STATUS_SET.contains(plugin.getPluginStatus())).findFirst() - .orElse(null); - this.stepsTotal = metisPlugins.size(); - if (currentPlugin instanceof AbstractExecutablePlugin abstractExecutablePlugin) { - this.currentPluginProgress = new PluginProgressView(abstractExecutablePlugin.getExecutionProgress()); - } - } - - public int getStepsDone() { - return stepsDone; - } - - public int getStepsTotal() { - return 
stepsTotal; - } - - public PluginProgressView getCurrentPluginProgress() { - return currentPluginProgress; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionSummaryView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionSummaryView.java deleted file mode 100644 index f564238084..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/ExecutionSummaryView.java +++ /dev/null @@ -1,78 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; - -/** - * This class represents the vital information on a workflow execution needed for the execution - * overview. 
- */ -public class ExecutionSummaryView { - - private String id; - private WorkflowStatus workflowStatus; - private boolean cancelling; - - private Date createdDate; - private Date startedDate; - private Date updatedDate; - private Date finishedDate; - - private List plugins; - - ExecutionSummaryView() { - } - - ExecutionSummaryView(WorkflowExecution execution) { - this.id = execution.getId().toString(); - this.workflowStatus = execution.getWorkflowStatus(); - this.cancelling = execution.isCancelling(); - this.createdDate = execution.getCreatedDate(); - this.startedDate = execution.getStartedDate(); - this.updatedDate = execution.getUpdatedDate(); - this.finishedDate = execution.getFinishedDate(); - this.plugins = execution.getMetisPlugins().stream().map(PluginSummaryView::new).collect( - Collectors.toList()); - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public WorkflowStatus getWorkflowStatus() { - return workflowStatus; - } - - public boolean isCancelling() { - return cancelling; - } - - public Date getCreatedDate() { - return Optional.ofNullable(createdDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public Date getStartedDate() { - return Optional.ofNullable(startedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public Date getUpdatedDate() { - return Optional.ofNullable(updatedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public Date getFinishedDate() { - return Optional.ofNullable(finishedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public List getPlugins() { - return Collections.unmodifiableList(plugins); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginProgressView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginProgressView.java deleted file mode 100644 index 8ff23563b3..0000000000 --- 
a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginProgressView.java +++ /dev/null @@ -1,54 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; - -/** - * This class contains executionProgress information on a plugin's execution. - */ -public class PluginProgressView { - - private int expectedRecords; - private int processedRecords; - private int ignoredRecords; - private int deletedRecords; - private int errors; - private int progressPercentage; - - PluginProgressView() { - } - - PluginProgressView(ExecutionProgress progress) { - if (progress != null) { - this.expectedRecords = progress.getExpectedRecords(); - this.processedRecords = progress.getProcessedRecords(); - this.ignoredRecords = progress.getIgnoredRecords(); - this.deletedRecords = progress.getDeletedRecords(); - this.errors = progress.getErrors(); - this.progressPercentage = progress.getProgressPercentage(); - } - } - - public int getExpectedRecords() { - return expectedRecords; - } - - public int getProcessedRecords() { - return processedRecords; - } - - public int getIgnoredRecords() { - return ignoredRecords; - } - - public int getDeletedRecords() { - return deletedRecords; - } - - public int getErrors() { - return errors; - } - - public int getProgressPercentage() { - return progressPercentage; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginSummaryView.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginSummaryView.java deleted file mode 100644 index f32c55ba2f..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/execution/overview/PluginSummaryView.java +++ /dev/null @@ -1,66 +0,0 @@ -package eu.europeana.metis.core.rest.execution.overview; - -import 
eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import java.util.Date; -import java.util.Optional; - -/** - * This class represents the vital information on a plugin execution needed for the execution - * overview. - */ -public class PluginSummaryView { - - private PluginType pluginType; - private PluginStatus pluginStatus; - private String failMessage; - private Date startedDate; - private Date updatedDate; - private Date finishedDate; - private PluginProgressView progress; - - PluginSummaryView() { - } - - PluginSummaryView(AbstractMetisPlugin plugin) { - this.pluginType = plugin.getPluginType(); - this.pluginStatus = plugin.getPluginStatus(); - this.failMessage = plugin.getFailMessage(); - this.startedDate = plugin.getStartedDate(); - this.finishedDate = plugin.getFinishedDate(); - if (plugin instanceof AbstractExecutablePlugin abstractExecutablePlugin) { - this.updatedDate = abstractExecutablePlugin.getUpdatedDate(); - this.progress = new PluginProgressView(abstractExecutablePlugin.getExecutionProgress()); - } - } - - public PluginType getPluginType() { - return pluginType; - } - - public PluginStatus getPluginStatus() { - return pluginStatus; - } - - public String getFailMessage() { - return failMessage; - } - - public Date getStartedDate() { - return Optional.ofNullable(startedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public Date getUpdatedDate() { - return Optional.ofNullable(updatedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public Date getFinishedDate() { - return Optional.ofNullable(finishedDate).map(Date::getTime).map(Date::new).orElse(null); - } - - public PluginProgressView getProgress() { - return progress; - } -} diff --git 
a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/AttributeStatistics.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/AttributeStatistics.java deleted file mode 100644 index 7e9af74c07..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/AttributeStatistics.java +++ /dev/null @@ -1,36 +0,0 @@ -package eu.europeana.metis.core.rest.stats; - -/** - * Statistics object that reflect the attribute level: statistics cover all attributes of this - * name/path, within nodes with the same xPath and the same node value. - */ -public class AttributeStatistics { - - private String xPath; - private String value; - private long occurrences; - - public String getxPath() { - return xPath; - } - - public void setxPath(String xPath) { - this.xPath = xPath; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - - public long getOccurrences() { - return occurrences; - } - - public void setOccurrences(long occurrences) { - this.occurrences = occurrences; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodePathStatistics.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodePathStatistics.java deleted file mode 100644 index 3c5df40085..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodePathStatistics.java +++ /dev/null @@ -1,31 +0,0 @@ -package eu.europeana.metis.core.rest.stats; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * Statistics object that reflect the node level: statistics cover all nodes with the same xPath. 
- */ -public class NodePathStatistics { - - private String xPath; - private List nodeValueStatistics; - - public String getxPath() { - return xPath; - } - - public void setxPath(String xPath) { - this.xPath = xPath; - } - - public List getNodeValueStatistics() { - return Collections.unmodifiableList(nodeValueStatistics); - } - - public void setNodeValueStatistics( - List nodeValueStatistics) { - this.nodeValueStatistics = new ArrayList<>(nodeValueStatistics); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodeValueStatistics.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodeValueStatistics.java deleted file mode 100644 index 7ac141d59b..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/NodeValueStatistics.java +++ /dev/null @@ -1,41 +0,0 @@ -package eu.europeana.metis.core.rest.stats; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * Statistics object that reflect the node value level: statistics cover all nodes with the same - * xPath and the same node value. 
- */ -public class NodeValueStatistics { - - private String value; - private long occurrences; - private List attributeStatistics; - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - - public long getOccurrences() { - return occurrences; - } - - public void setOccurrences(long occurrences) { - this.occurrences = occurrences; - } - - public List getAttributeStatistics() { - return Collections.unmodifiableList(attributeStatistics); - } - - public void setAttributeStatistics( - List attributeStatistics) { - this.attributeStatistics = new ArrayList<>(attributeStatistics); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/RecordStatistics.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/RecordStatistics.java deleted file mode 100644 index ba52dcddbe..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/rest/stats/RecordStatistics.java +++ /dev/null @@ -1,31 +0,0 @@ -package eu.europeana.metis.core.rest.stats; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * Statistics object that reflect the record level: statistics cover all records. 
- */ -public class RecordStatistics { - - private long taskId; - private List nodePathStatistics; - - public long getTaskId() { - return taskId; - } - - public void setTaskId(long taskId) { - this.taskId = taskId; - } - - public List getNodePathStatistics() { - return Collections.unmodifiableList(nodePathStatistics); - } - - public void setNodePathStatistics( - List nodePathStatistics) { - this.nodePathStatistics = new ArrayList<>(nodePathStatistics); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduleFrequence.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduleFrequence.java deleted file mode 100644 index 821b758c6e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduleFrequence.java +++ /dev/null @@ -1,27 +0,0 @@ -package eu.europeana.metis.core.workflow; - -import com.fasterxml.jackson.annotation.JsonCreator; - -/** - * Enumeration with all the possible frequence values - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-25 - */ -public enum ScheduleFrequence { - ONCE, DAILY, WEEKLY, MONTHLY, NULL; - - /** - * During json deserialization the name used on the corresponding field is looked up in the list of enum value field names. 
- * @param name the value in the json field - * @return the {@link ScheduleFrequence} value corresponding to the json field value - */ - @JsonCreator - public static ScheduleFrequence getScheduleFrequenceFromEnumName(String name){ - for (ScheduleFrequence scheduleFrequence: ScheduleFrequence.values()) { - if(scheduleFrequence.name().equalsIgnoreCase(name)){ - return scheduleFrequence; - } - } - return NULL; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduledWorkflow.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduledWorkflow.java deleted file mode 100644 index ae48115242..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ScheduledWorkflow.java +++ /dev/null @@ -1,99 +0,0 @@ -package eu.europeana.metis.core.workflow; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import java.util.Date; -import org.bson.types.ObjectId; - -/** - * Class to represent a scheduled workflow. - * The {@link ScheduleFrequence} {@link #scheduleFrequence} will be used in conjunction with the {@link #pointerDate} to determine when a scheduled execution is ready to be ran. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-25 - */ -@Entity -@Indexes({ - @Index(fields = {@Field("datasetId")}), - @Index(fields = {@Field("pointerDate")})}) -public class ScheduledWorkflow implements HasMongoObjectId { - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - private String datasetId; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT_FOR_SCHEDULING) - private Date pointerDate; - private ScheduleFrequence scheduleFrequence; - private int workflowPriority; - - public ScheduledWorkflow() { - //Required for json serialization - } - - /** - * Constructor for creating a scheduled workflow - * - * @param pointerDate the {@link Date} that will be used as a pointer Date - * @param datasetId identifier of the dataset for the scheduled workflow - * @param scheduleFrequence the {@link ScheduleFrequence} for the workflow - * @param workflowPriority the priority of the workflow when it is run - */ - public ScheduledWorkflow(Date pointerDate, String datasetId, ScheduleFrequence scheduleFrequence, - int workflowPriority) { - this.pointerDate = pointerDate == null ? 
null : new Date(pointerDate.getTime()); - this.datasetId = datasetId; - this.scheduleFrequence = scheduleFrequence; - this.workflowPriority = workflowPriority; - } - - @Override - public ObjectId getId() { - return id; - } - - @Override - public void setId(ObjectId id) { - this.id = id; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public Date getPointerDate() { - return pointerDate == null?null:new Date(pointerDate.getTime()); - } - - public void setPointerDate(Date pointerDate) { - this.pointerDate = pointerDate == null?null:new Date(pointerDate.getTime()); - } - - public ScheduleFrequence getScheduleFrequence() { - return scheduleFrequence; - } - - public void setScheduleFrequence(ScheduleFrequence scheduleFrequence) { - this.scheduleFrequence = scheduleFrequence; - } - - public int getWorkflowPriority() { - return workflowPriority; - } - - public void setWorkflowPriority(int workflowPriority) { - this.workflowPriority = workflowPriority; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/SystemId.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/SystemId.java deleted file mode 100644 index a2f0231fc6..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/SystemId.java +++ /dev/null @@ -1,11 +0,0 @@ -package eu.europeana.metis.core.workflow; - -/** - * Enum that contains values used as identifiers for system specific cancellation operations. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-02-06 - */ -public enum SystemId { - SYSTEM_MINUTE_CAP_EXPIRE, STARTED_BY_SYSTEM -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ValidationProperties.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ValidationProperties.java deleted file mode 100644 index fdb8d6f93c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/ValidationProperties.java +++ /dev/null @@ -1,40 +0,0 @@ -package eu.europeana.metis.core.workflow; - -/** - * This class represents extra properties that are needed for validation. - * - * @author jochen - * - */ -public class ValidationProperties { - - private final String urlOfSchemasZip; - private final String schemaRootPath; - private final String schematronRootPath; - - /** - * Constructor. - * - * @param urlOfSchemasZip The URL of the schemas Zip-file. - * @param schemaRootPath The path of the root schema XSL (within the Zip-file). - * @param schematronRootPath The path of the root Schematron XSL (within the Zip-file). 
- */ - public ValidationProperties(String urlOfSchemasZip, String schemaRootPath, - String schematronRootPath) { - this.urlOfSchemasZip = urlOfSchemasZip; - this.schemaRootPath = schemaRootPath; - this.schematronRootPath = schematronRootPath; - } - - public String getUrlOfSchemasZip() { - return urlOfSchemasZip; - } - - public String getSchemaRootPath() { - return schemaRootPath; - } - - public String getSchematronRootPath() { - return schematronRootPath; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/Workflow.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/Workflow.java deleted file mode 100644 index 2c81e471e5..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/Workflow.java +++ /dev/null @@ -1,70 +0,0 @@ -package eu.europeana.metis.core.workflow; - -import com.fasterxml.jackson.annotation.JsonPropertyOrder; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper; -import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import dev.morphia.annotations.IndexOptions; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import java.util.ArrayList; -import java.util.List; -import org.bson.types.ObjectId; - -/** - * Workflow model class. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -@Entity -@Indexes(@Index(fields = {@Field("datasetId")}, options = @IndexOptions(unique = true))) -@JsonPropertyOrder({"id", "datasetId", "metisPluginMetadata"}) -public class Workflow implements HasMongoObjectId { - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - private String datasetId; - - @JacksonXmlElementWrapper(localName = "metisPluginsMetadatas") - @JacksonXmlProperty(localName = "metisPluginsMetadata") - private List metisPluginsMetadata = new ArrayList<>(); - - @Override - public ObjectId getId() { - return id; - } - - @Override - public void setId(ObjectId id) { - this.id = id; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public List getMetisPluginsMetadata() { - return metisPluginsMetadata != null? new ArrayList<>(metisPluginsMetadata) : null; - } - - public void setMetisPluginsMetadata( - List metisPluginsMetadata) { - if(metisPluginsMetadata != null ) { - this.metisPluginsMetadata = new ArrayList<>(metisPluginsMetadata); - } else { - this.metisPluginsMetadata = null; - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowExecution.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowExecution.java deleted file mode 100644 index 01bc3ff26d..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowExecution.java +++ /dev/null @@ -1,275 +0,0 @@ -package eu.europeana.metis.core.workflow; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import dev.morphia.annotations.Entity; -import dev.morphia.annotations.Field; -import dev.morphia.annotations.Id; -import dev.morphia.annotations.Index; -import 
dev.morphia.annotations.IndexOptions; -import dev.morphia.annotations.Indexes; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.mongo.model.HasMongoObjectId; -import eu.europeana.metis.mongo.utils.ObjectIdSerializer; -import eu.europeana.metis.utils.CommonStringValues; -import org.bson.types.ObjectId; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -/** - * Is the structure where the combined plugins of harvesting and the other plugins will be stored. - *

This is the object where the execution of the workflow takes place and will host all - * information, regarding its execution.

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-26 - */ -@Entity -@Indexes({ - @Index(fields = {@Field("datasetId")}), - @Index(fields = {@Field("workflowStatus")}), - @Index(fields = {@Field("ecloudDatasetId")}), - @Index(fields = {@Field("cancelledBy")}), - @Index(fields = {@Field("startedBy")}), - @Index(fields = {@Field("createdDate")}), - @Index(fields = {@Field("startedDate")}), - @Index(fields = {@Field("updatedDate")}), - @Index(fields = {@Field("finishedDate")}), - //Embedded indexes definitions should be referenced on the parent entity - // disabling index validation mapping due metisPlugins is an AbstractMetisPlugin - // so ? can be anything. Morphia has a potential feature lack when normalizing - @Index(fields = {@Field("metisPlugins.id")}, options = @IndexOptions(disableValidation = true)), - @Index(fields = {@Field("metisPlugins.startedDate")}, options = @IndexOptions(disableValidation = true)), - @Index(fields = {@Field("metisPlugins.updatedDate")}, options = @IndexOptions(disableValidation = true)), - @Index(fields = {@Field("metisPlugins.finishedDate")}, options = @IndexOptions(disableValidation = true))}) -public class WorkflowExecution implements HasMongoObjectId { - - @Id - @JsonSerialize(using = ObjectIdSerializer.class) - private ObjectId id; - private String datasetId; - private WorkflowStatus workflowStatus; - private String ecloudDatasetId; - private String cancelledBy; - private String startedBy; - private int workflowPriority; - private boolean cancelling; - - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date createdDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date startedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date updatedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date finishedDate; - - private List metisPlugins = new ArrayList<>(); - - public WorkflowExecution() { - //Required for json serialization 
- } - - /** - * Constructor with all required parameters and initializes it's internal structure. - * - * @param dataset the {@link Dataset} related to the execution - * @param metisPlugins the list of {@link AbstractMetisPlugin} including harvest plugin for - * execution - * @param workflowPriority the positive number of the priority of the execution - */ - public WorkflowExecution(Dataset dataset, List metisPlugins, - int workflowPriority) { - this.datasetId = dataset.getDatasetId(); - this.ecloudDatasetId = dataset.getEcloudDatasetId(); - this.workflowPriority = workflowPriority; - this.metisPlugins = new ArrayList<>(metisPlugins); - } - - /** - * Sets all plugins inside the execution, that have status {@link PluginStatus#INQUEUE} or {@link - * PluginStatus#RUNNING} or {@link PluginStatus#CLEANING} or {@link PluginStatus#PENDING}, to - * {@link PluginStatus#CANCELLED} - */ - public void setWorkflowAndAllQualifiedPluginsToCancelled() { - this.setWorkflowStatus(WorkflowStatus.CANCELLED); - setAllQualifiedPluginsToCancelled(); - this.setCancelling(false); - } - - /** - * Checks if one of the plugins has {@link PluginStatus#FAILED} and if yes sets all other plugins - * that have status {@link PluginStatus#INQUEUE} or {@link PluginStatus#RUNNING} or {@link - * PluginStatus#CLEANING} or {@link PluginStatus#PENDING}, to {@link PluginStatus#CANCELLED} - */ - public void checkAndSetAllRunningAndInqueuePluginsToCancelledIfOnePluginHasFailed() { - boolean hasAPluginFailed = false; - for (AbstractMetisPlugin metisPlugin : this.getMetisPlugins()) { - if (metisPlugin.getPluginStatus() == PluginStatus.FAILED) { - hasAPluginFailed = true; - break; - } - } - if (hasAPluginFailed) { - this.setWorkflowStatus(WorkflowStatus.FAILED); - setAllQualifiedPluginsToCancelled(); - } - } - - private void setAllQualifiedPluginsToCancelled() { - for (AbstractMetisPlugin metisPlugin : this.getMetisPlugins()) { - if (metisPlugin.getPluginStatus() == PluginStatus.INQUEUE - || 
metisPlugin.getPluginStatus() == PluginStatus.RUNNING - || metisPlugin.getPluginStatus() == PluginStatus.CLEANING - || metisPlugin.getPluginStatus() == PluginStatus.PENDING - || metisPlugin.getPluginStatus() == PluginStatus.IDENTIFYING_DELETED_RECORDS) { - metisPlugin.setPluginStatusAndResetFailMessage(PluginStatus.CANCELLED); - } - } - } - - /** - * Returns an {@link Optional} for the plugin with the given plugin type. - * - * @param pluginType The type of the plugin we are looking for. - * @return The plugin. - */ - public Optional getMetisPluginWithType(PluginType pluginType) { - return getMetisPlugins().stream().filter(plugin -> plugin.getPluginType() == pluginType) - .findFirst(); - } - - @Override - public ObjectId getId() { - return id; - } - - @Override - public void setId(ObjectId id) { - this.id = id; - } - - public boolean isCancelling() { - return cancelling; - } - - public void setCancelling(boolean cancelling) { - this.cancelling = cancelling; - } - - public WorkflowStatus getWorkflowStatus() { - return workflowStatus; - } - - public void setWorkflowStatus(WorkflowStatus workflowStatus) { - this.workflowStatus = workflowStatus; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getCancelledBy() { - return cancelledBy; - } - - public void setCancelledBy(String cancelledBy) { - this.cancelledBy = cancelledBy; - } - - public String getStartedBy() { - return startedBy; - } - - public void setStartedBy(String startedBy) { - this.startedBy = startedBy; - } - - public String getEcloudDatasetId() { - return ecloudDatasetId; - } - - public void setEcloudDatasetId(String ecloudDatasetId) { - this.ecloudDatasetId = ecloudDatasetId; - } - - public int getWorkflowPriority() { - return workflowPriority; - } - - public void setWorkflowPriority(int workflowPriority) { - this.workflowPriority = workflowPriority; - } - - public Date getCreatedDate() { - 
return createdDate == null ? null : new Date(createdDate.getTime()); - } - - public void setCreatedDate(Date createdDate) { - this.createdDate = createdDate == null ? null : new Date(createdDate.getTime()); - } - - public Date getStartedDate() { - return startedDate == null ? null : new Date(startedDate.getTime()); - } - - public void setStartedDate(Date startedDate) { - this.startedDate = startedDate == null ? null : new Date(startedDate.getTime()); - } - - public Date getFinishedDate() { - return finishedDate == null ? null : new Date(finishedDate.getTime()); - } - - public void setFinishedDate(Date finishedDate) { - this.finishedDate = finishedDate == null ? null : new Date(finishedDate.getTime()); - } - - public Date getUpdatedDate() { - return updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - public void setUpdatedDate(Date updatedDate) { - this.updatedDate = updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - public List getMetisPlugins() { - return metisPlugins; - } - - public void setMetisPlugins(List metisPlugins) { - if(metisPlugins != null) { - this.metisPlugins = new ArrayList<>(metisPlugins); - } else { - this.metisPlugins = null; - } - } - - @Override - public int hashCode() { - return Objects.hash(id, datasetId); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (obj == null || obj.getClass() != this.getClass()) { - return false; - } - WorkflowExecution that = (WorkflowExecution) obj; - return Objects.equals(id, that.getId()) && Objects.equals(datasetId, that.datasetId); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowStatus.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowStatus.java deleted file mode 100644 index b686ef954e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/WorkflowStatus.java +++ /dev/null @@ -1,11 +0,0 
@@ -package eu.europeana.metis.core.workflow; - -/** - * Enumeration with all workflow statuses. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-26 - */ -public enum WorkflowStatus { - INQUEUE, RUNNING, FINISHED, FAILED, CANCELLED -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePlugin.java deleted file mode 100644 index 6135f45630..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePlugin.java +++ /dev/null @@ -1,312 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.Revision; -import eu.europeana.cloud.common.model.dps.TaskInfo; -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.InputDataType; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import eu.europeana.cloud.service.dps.exception.AccessDeniedOrObjectDoesNotExistException; -import eu.europeana.cloud.service.dps.exception.DpsException; -import eu.europeana.metis.core.workflow.SystemId; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.UnrecoverableExternalTaskException; -import eu.europeana.metis.utils.CommonStringValues; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Collections; -import java.util.Date; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This abstract class is the base implementation of {@link ExecutablePlugin} and all executable plugins should inherit from it. 
- * - * @param The type of the plugin metadata that this plugin represents. - */ -public abstract class AbstractExecutablePlugin extends - AbstractMetisPlugin implements ExecutablePlugin { - - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractExecutablePlugin.class); - - private String externalTaskId; - private ExecutionProgress executionProgress = new ExecutionProgress(); - - /** - * Required by (de)serialization in db. - *

It is not to be used manually

- */ - AbstractExecutablePlugin() { - //Required by (de)serialization in db - } - - /** - * Constructor with provided pluginType - * - * @param pluginType {@link PluginType} - */ - AbstractExecutablePlugin(PluginType pluginType) { - super(pluginType); - } - - /** - * Constructor to initialize the plugin with pluginMetadata required and the pluginType. - * - * @param pluginType a {@link PluginType} related to the implemented plugin - * @param pluginMetadata the plugin metadata - */ - AbstractExecutablePlugin(PluginType pluginType, M pluginMetadata) { - super(pluginType, pluginMetadata); - } - - @Override - public String getExternalTaskId() { - return this.externalTaskId; - } - - /** - * @param externalTaskId String representation of the external task identifier of the execution - */ - public void setExternalTaskId(String externalTaskId) { - this.externalTaskId = externalTaskId; - } - - @Override - public ExecutionProgress getExecutionProgress() { - return this.executionProgress; - } - - /** - * @param executionProgress {@link ExecutionProgress} of the external execution - */ - public void setExecutionProgress(ExecutionProgress executionProgress) { - this.executionProgress = executionProgress; - } - - // TODO: 24/08/2023 This last boolean needs to be cleaned. 
- private Revision createOutputRevisionForExecution(String ecloudProvider, boolean published) { - return new Revision(getPluginType().name(), ecloudProvider, getStartedDate(), false); - } - - private DpsTask createDpsTaskForPluginWithExistingDataset(Map parameters, - DpsTaskSettings dpsTaskSettings, boolean publish) { - DpsTask dpsTask = new DpsTask(); - - Map> dataEntries = new EnumMap<>(InputDataType.class); - dataEntries.put(InputDataType.DATASET_URLS, Collections.singletonList(String - .format(CommonStringValues.S_DATA_PROVIDERS_S_DATA_SETS_S_TEMPLATE, dpsTaskSettings.getEcloudBaseUrl(), - dpsTaskSettings.getEcloudProvider(), dpsTaskSettings.getEcloudDatasetId()))); - dpsTask.setInputData(dataEntries); - - dpsTask.setParameters(parameters); - dpsTask.setOutputRevision(createOutputRevisionForExecution(dpsTaskSettings.getEcloudProvider(), publish)); - return dpsTask; - } - - DpsTask createDpsTaskForHarvestPlugin(DpsTaskSettings dpsTaskSettings, - Map extraParameters, String targetUrl, boolean incrementalProcessing) { - DpsTask dpsTask = new DpsTask(); - - Map> dataEntries = new EnumMap<>(InputDataType.class); - dataEntries.put(InputDataType.REPOSITORY_URLS, Collections.singletonList(targetUrl)); - dpsTask.setInputData(dataEntries); - - Map parameters = new HashMap<>(); - if (extraParameters != null) { - parameters.putAll(extraParameters); - } - - final DateFormat dateFormat = new SimpleDateFormat(CommonStringValues.DATE_FORMAT_Z, Locale.US); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - parameters.put(PluginParameterKeys.INCREMENTAL_HARVEST, String.valueOf(incrementalProcessing)); - parameters.put(PluginParameterKeys.HARVEST_DATE, dateFormat.format(getStartedDate())); - parameters.put(PluginParameterKeys.PROVIDER_ID, dpsTaskSettings.getEcloudProvider()); - parameters.put(PluginParameterKeys.OUTPUT_DATA_SETS, String - .format(CommonStringValues.S_DATA_PROVIDERS_S_DATA_SETS_S_TEMPLATE, dpsTaskSettings.getEcloudBaseUrl(), - 
dpsTaskSettings.getEcloudProvider(), dpsTaskSettings.getEcloudDatasetId())); - parameters.put(PluginParameterKeys.NEW_REPRESENTATION_NAME, MetisPlugin.getRepresentationName()); - dpsTask.setParameters(parameters); - - dpsTask.setOutputRevision(createOutputRevisionForExecution(dpsTaskSettings.getEcloudProvider(), false)); - return dpsTask; - } - - DpsTask createDpsTaskForProcessPlugin(DpsTaskSettings dpsTaskSettings, - Map extraParameters) { - Map parameters = new HashMap<>(); - if (extraParameters != null) { - parameters.putAll(extraParameters); - } - parameters.put(PluginParameterKeys.REPRESENTATION_NAME, MetisPlugin.getRepresentationName()); - parameters.put(PluginParameterKeys.REVISION_NAME, getPluginMetadata().getRevisionNamePreviousPlugin()); - parameters.put(PluginParameterKeys.REVISION_PROVIDER, dpsTaskSettings.getEcloudProvider()); - DateFormat dateFormat = new SimpleDateFormat(CommonStringValues.DATE_FORMAT_Z, Locale.US); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - parameters - .put(PluginParameterKeys.REVISION_TIMESTAMP, dateFormat.format(getPluginMetadata().getRevisionTimestampPreviousPlugin())); - parameters.put(PluginParameterKeys.PREVIOUS_TASK_ID, dpsTaskSettings.getPreviousExternalTaskId()); - parameters.put(PluginParameterKeys.NEW_REPRESENTATION_NAME, MetisPlugin.getRepresentationName()); - parameters.put(PluginParameterKeys.OUTPUT_DATA_SETS, String - .format(CommonStringValues.S_DATA_PROVIDERS_S_DATA_SETS_S_TEMPLATE, dpsTaskSettings.getEcloudBaseUrl(), - dpsTaskSettings.getEcloudProvider(), dpsTaskSettings.getEcloudDatasetId())); - return createDpsTaskForPluginWithExistingDataset(parameters, dpsTaskSettings, false); - } - - DpsTask createDpsTaskForIndexPlugin(DpsTaskSettings dpsTaskSettings, String datasetId, - boolean incrementalIndexing, Date harvestDate, boolean preserveTimestamps, - List datasetIdsToRedirectFrom, boolean performRedirects, String targetDatabase) { - final DateFormat dateFormat = new 
SimpleDateFormat(CommonStringValues.DATE_FORMAT_Z, Locale.US); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - final Map extraParameters = new HashMap<>(); - extraParameters.put(PluginParameterKeys.METIS_DATASET_ID, datasetId); - extraParameters.put(PluginParameterKeys.INCREMENTAL_INDEXING, String.valueOf(incrementalIndexing)); - extraParameters.put(PluginParameterKeys.HARVEST_DATE, dateFormat.format(harvestDate)); - extraParameters.put(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE, targetDatabase); - extraParameters.put(PluginParameterKeys.METIS_RECORD_DATE, dateFormat.format(getStartedDate())); - extraParameters.put(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS, String.valueOf(preserveTimestamps)); - extraParameters.put(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM, String.join(",", datasetIdsToRedirectFrom)); - extraParameters.put(PluginParameterKeys.PERFORM_REDIRECTS, String.valueOf(performRedirects)); - return createDpsTaskForProcessPlugin(dpsTaskSettings, extraParameters); - } - - Map createParametersForValidationExternal(String urlOfSchemasZip, String schemaRootPath, - String schematronRootPath) { - final Map parametersForValidation = createParametersForValidation(urlOfSchemasZip, schemaRootPath, - schematronRootPath); - parametersForValidation.put(PluginParameterKeys.GENERATE_STATS, Boolean.TRUE.toString()); - return parametersForValidation; - } - - Map createParametersForValidationInternal(String urlOfSchemasZip, String schemaRootPath, - String schematronRootPath) { - final Map parametersForValidation = createParametersForValidation(urlOfSchemasZip, schemaRootPath, - schematronRootPath); - parametersForValidation.put(PluginParameterKeys.GENERATE_STATS, Boolean.FALSE.toString()); - return parametersForValidation; - } - - private Map createParametersForValidation(String urlOfSchemasZip, String schemaRootPath, - String schematronRootPath) { - Map extraParameters = new HashMap<>(); - extraParameters.put(PluginParameterKeys.SCHEMA_NAME, 
urlOfSchemasZip); - extraParameters.put(PluginParameterKeys.ROOT_LOCATION, schemaRootPath); - extraParameters.put(PluginParameterKeys.SCHEMATRON_LOCATION, schematronRootPath); - return extraParameters; - } - - /** - * Prepare the {@link DpsTask} based on the specific implementation of the plugin. - * - * @param dpsTaskSettings the basic parameter required for each execution - * @return the {@link DpsTask} prepared with all the required parameters - */ - abstract DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings); - - @Override - public void execute(String datasetId, DpsClient dpsClient, DpsTaskSettings dpsTaskSettings) - throws ExternalTaskException { - String pluginTypeName = getPluginType().name(); - LOGGER.info("Starting execution of {} plugin for ecloudDatasetId {}", pluginTypeName, - dpsTaskSettings.getEcloudDatasetId()); - - DpsTask dpsTask = prepareDpsTask(datasetId, dpsTaskSettings); - try { - setExternalTaskId(Long.toString(dpsClient.submitTask(dpsTask, getTopologyName()))); - setDataStatus(DataStatus.VALID); - } catch (DpsException | RuntimeException e) { - throw new ExternalTaskException("Submitting task failed", e); - } - LOGGER.info("Submitted task with externalTaskId: {}", getExternalTaskId()); - } - - @Override - public MonitorResult monitor(DpsClient dpsClient) throws ExternalTaskException, UnrecoverableExternalTaskException { - LOGGER.info("Requesting progress information for externalTaskId: {}", getExternalTaskId()); - TaskInfo taskInfo; - try { - taskInfo = dpsClient.getTaskProgress(getTopologyName(), Long.parseLong(getExternalTaskId())); - } catch (AccessDeniedOrObjectDoesNotExistException e) { - throw new UnrecoverableExternalTaskException("Requesting task progress failed", e); - } catch (DpsException | RuntimeException e) { - throw new ExternalTaskException("Requesting task progress failed", e); - } - LOGGER.info("Task information received for externalTaskId: {}", getExternalTaskId()); - 
updateExecutionProgress(taskInfo); - return new MonitorResult(taskInfo.getState(), taskInfo.getStateDescription()); - } - - /** - * Update this object's {@link ExecutionProgress} based on the received {@link TaskInfo}. - * - * @param taskInfo {@link TaskInfo} - */ - void updateExecutionProgress(TaskInfo taskInfo) { - - // Calculate the various counts. - // The expectedRecordsNumber we get from ecloud is dynamic and can change during execution. - int expectedRecordCount; - int processedRecordCount; - int deletedRecordCount; - - if (getPluginMetadata() instanceof AbstractHarvestPluginMetadata && ((AbstractHarvestPluginMetadata) this.getPluginMetadata()) - .isIncrementalHarvest()) { - //Incremental Harvest - //deletedRecordsCount never used - //expectedPostProcessedRecordsNumber and postProcessedRecordsCount represent deleted records - expectedRecordCount = taskInfo.getExpectedRecordsNumber(); - processedRecordCount = taskInfo.getProcessedRecordsCount() + taskInfo.getIgnoredRecordsCount(); - deletedRecordCount = taskInfo.getPostProcessedRecordsCount(); - } else if (getPluginMetadata() instanceof AbstractHarvestPluginMetadata) { - //Full Harvest - //expectedPostProcessedRecordsNumber, postProcessedRecordsCount and ignoredRecordsCount not used - //deletedRecordsCount is always 0 - expectedRecordCount = taskInfo.getExpectedRecordsNumber(); - processedRecordCount = taskInfo.getProcessedRecordsCount(); - deletedRecordCount = taskInfo.getDeletedRecordsCount(); - } else if (getPluginMetadata() instanceof AbstractIndexPluginMetadata && !((AbstractIndexPluginMetadata) this - .getPluginMetadata()).isIncrementalIndexing()) { - //Full Indexing - //ignoredRecordsCount never used - //expectedPostProcessedRecordsNumber and postProcessedRecordsCount represent deleted records - //The deletedRecordsCount is always 0 - expectedRecordCount = taskInfo.getExpectedRecordsNumber(); - processedRecordCount = taskInfo.getProcessedRecordsCount(); - deletedRecordCount = 
taskInfo.getPostProcessedRecordsCount(); - } else { - //Other plugins including incremental indexing - //expectedPostProcessedRecordsNumber, postProcessedRecordsCount and ignoredRecordsCount not used - expectedRecordCount = taskInfo.getExpectedRecordsNumber() - taskInfo.getDeletedRecordsCount(); - processedRecordCount = taskInfo.getProcessedRecordsCount(); - deletedRecordCount = taskInfo.getDeletedRecordsCount(); - } - - int errorCount = taskInfo.getProcessedErrorsCount() + taskInfo.getDeletedErrorsCount(); - int ignoredCount = taskInfo.getIgnoredRecordsCount(); - - // Update the execution progress. - getExecutionProgress().setExpectedRecords(expectedRecordCount); - getExecutionProgress().setProcessedRecords(processedRecordCount); - getExecutionProgress().setDeletedRecords(deletedRecordCount); - getExecutionProgress().setIgnoredRecords(ignoredCount); - getExecutionProgress().setErrors(errorCount); - getExecutionProgress().recalculateProgressPercentage(); - getExecutionProgress().setStatus(taskInfo.getState()); - } - - @Override - public void cancel(DpsClient dpsClient, String cancelledById) throws ExternalTaskException { - LOGGER.info("Cancel execution for externalTaskId: {}", getExternalTaskId()); - try { - dpsClient.killTask(getTopologyName(), Long.parseLong(getExternalTaskId()), - SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name().equals(cancelledById) ? 
"Cancelled By System" : "Cancelled By User"); - } catch (DpsException | RuntimeException e) { - throw new ExternalTaskException("Requesting task cancellation failed", e); - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePluginMetadata.java deleted file mode 100644 index 98f6175320..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractExecutablePluginMetadata.java +++ /dev/null @@ -1,29 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This abstract class is the base implementation of {@link ExecutablePluginMetadata} and all - * executable plugins should inherit from it. - */ -public abstract class AbstractExecutablePluginMetadata extends AbstractMetisPluginMetadata - implements ExecutablePluginMetadata { - - private boolean enabled; - - public AbstractExecutablePluginMetadata() { - } - - @Override - public final PluginType getPluginType() { - return getExecutablePluginType().toPluginType(); - } - - @Override - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractHarvestPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractHarvestPluginMetadata.java deleted file mode 100644 index 19ee4a1809..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractHarvestPluginMetadata.java +++ /dev/null @@ -1,25 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This abstract class is the base implementation of {@link ExecutablePluginMetadata} for harvest - * tasks. 
All executable harvest plugins should inherit from it. - */ -public abstract class AbstractHarvestPluginMetadata extends AbstractExecutablePluginMetadata { - - //Default false. If false, it indicates that the ProvidedCHO rdf:about should be used to set the identifier for ECloud - private boolean useDefaultIdentifiers; - - public AbstractHarvestPluginMetadata() { - //Required for json serialization - } - - public boolean isUseDefaultIdentifiers() { - return useDefaultIdentifiers; - } - - public void setUseDefaultIdentifiers(boolean useDefaultIdentifiers) { - this.useDefaultIdentifiers = useDefaultIdentifiers; - } - - public abstract boolean isIncrementalHarvest(); -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractIndexPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractIndexPluginMetadata.java deleted file mode 100644 index ccf8a7c77f..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractIndexPluginMetadata.java +++ /dev/null @@ -1,64 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -/** - * This abstract class is the base implementation of {@link ExecutablePluginMetadata} for index - * tasks. All executable index plugins should inherit from it. - */ -public abstract class AbstractIndexPluginMetadata extends AbstractExecutablePluginMetadata { - - private boolean preserveTimestamps; - private boolean performRedirects; - private List datasetIdsToRedirectFrom = new ArrayList<>(); - private boolean incrementalIndexing; // Default: false (i.e. 
full processing) - private Date harvestDate; - - public AbstractIndexPluginMetadata() { - //Required for json serialization - } - - public boolean isPreserveTimestamps() { - return preserveTimestamps; - } - - public void setPreserveTimestamps(boolean preserveTimestamps) { - this.preserveTimestamps = preserveTimestamps; - } - - public boolean isPerformRedirects() { - return performRedirects; - } - - public void setPerformRedirects(boolean performRedirects) { - this.performRedirects = performRedirects; - } - - public List getDatasetIdsToRedirectFrom() { - return new ArrayList<>(datasetIdsToRedirectFrom); - } - - public void setDatasetIdsToRedirectFrom(List datasetIdsToRedirectFrom) { - this.datasetIdsToRedirectFrom = - datasetIdsToRedirectFrom == null ? new ArrayList<>() : new ArrayList<>( - datasetIdsToRedirectFrom); - } - - public boolean isIncrementalIndexing() { - return incrementalIndexing; - } - - public void setIncrementalIndexing(boolean incrementalIndexing) { - this.incrementalIndexing = incrementalIndexing; - } - - public Date getHarvestDate() { - return harvestDate; - } - - public void setHarvestDate(Date harvestDate) { - this.harvestDate = harvestDate; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPlugin.java deleted file mode 100644 index 073c7bb34f..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPlugin.java +++ /dev/null @@ -1,178 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeInfo.As; -import dev.morphia.annotations.Entity; -import eu.europeana.metis.utils.CommonStringValues; -import 
java.util.Date; - -/** - * This abstract class is the base implementation of {@link MetisPlugin} and all other plugins - * should inherit from it. - * - * @param The type of the plugin metadata that this plugin represents. - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-06-01 - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = As.EXISTING_PROPERTY, property = "pluginType") -@JsonSubTypes({ - @JsonSubTypes.Type(value = OaipmhHarvestPlugin.class, name = "OAIPMH_HARVEST"), - @JsonSubTypes.Type(value = HTTPHarvestPlugin.class, name = "HTTP_HARVEST"), - @JsonSubTypes.Type(value = ValidationInternalPlugin.class, name = "VALIDATION_INTERNAL"), - @JsonSubTypes.Type(value = TransformationPlugin.class, name = "TRANSFORMATION"), - @JsonSubTypes.Type(value = ValidationExternalPlugin.class, name = "VALIDATION_EXTERNAL"), - @JsonSubTypes.Type(value = NormalizationPlugin.class, name = "NORMALIZATION"), - @JsonSubTypes.Type(value = EnrichmentPlugin.class, name = "ENRICHMENT"), - @JsonSubTypes.Type(value = MediaProcessPlugin.class, name = "MEDIA_PROCESS"), - @JsonSubTypes.Type(value = LinkCheckingPlugin.class, name = "LINK_CHECKING"), - @JsonSubTypes.Type(value = IndexToPreviewPlugin.class, name = "PREVIEW"), - @JsonSubTypes.Type(value = IndexToPublishPlugin.class, name = "PUBLISH") -}) -@Entity -public abstract class AbstractMetisPlugin implements - MetisPlugin { - - protected PluginType pluginType; - private String id; - - private PluginStatus pluginStatus = PluginStatus.INQUEUE; - private DataStatus dataStatus; - private String failMessage; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date startedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date updatedDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date finishedDate; - private M pluginMetadata; - - - /** - * Required by (de)serialization in db. - *

It is not to be used manually

- */ - protected AbstractMetisPlugin() { - //Required by (de)serialization in db - } - - /** - * Constructor with provided pluginType - * - * @param pluginType {@link PluginType} - */ - protected AbstractMetisPlugin(PluginType pluginType) { - this.pluginType = pluginType; - } - - /** - * Constructor to initialize the plugin with pluginMetadata required and the pluginType. - * - * @param pluginType a {@link PluginType} related to the implemented plugin - * @param pluginMetadata The plugin metadata. - */ - AbstractMetisPlugin(PluginType pluginType, M pluginMetadata) { - this.pluginType = pluginType; - this.pluginMetadata = pluginMetadata; - } - - @Override - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - @Override - public PluginType getPluginType() { - return pluginType; - } - - @Override - public DataStatus getDataStatus() { - return dataStatus; - } - - public void setDataStatus(DataStatus dataStatus) { - this.dataStatus = dataStatus; - } - - @Override - public M getPluginMetadata() { - return pluginMetadata; - } - - public void setPluginMetadata(M pluginMetadata) { - this.pluginMetadata = pluginMetadata; - } - - @Override - public Date getStartedDate() { - return startedDate == null ? null : new Date(startedDate.getTime()); - } - - /** - * @param startedDate {@link Date} - */ - public void setStartedDate(Date startedDate) { - this.startedDate = startedDate == null ? null : new Date(startedDate.getTime()); - } - - @Override - public Date getUpdatedDate() { - return updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - /** - * @param updatedDate {@link Date} - */ - public void setUpdatedDate(Date updatedDate) { - this.updatedDate = updatedDate == null ? null : new Date(updatedDate.getTime()); - } - - @Override - public Date getFinishedDate() { - return finishedDate == null ? 
null : new Date(finishedDate.getTime()); - } - - /** - * @param finishedDate {@link Date} - */ - public void setFinishedDate(Date finishedDate) { - this.finishedDate = finishedDate == null ? null : new Date(finishedDate.getTime()); - } - - @Override - public PluginStatus getPluginStatus() { - return pluginStatus; - } - - /** - * @param pluginStatus {@link PluginStatus} - */ - public void setPluginStatus(PluginStatus pluginStatus) { - this.pluginStatus = pluginStatus; - } - - /** - * This method sets the plugin status and also clears the fail message. - * - * @param pluginStatus {@link PluginStatus} - */ - public void setPluginStatusAndResetFailMessage(PluginStatus pluginStatus) { - setPluginStatus(pluginStatus); - setFailMessage(null); - } - - @Override - public String getFailMessage() { - return failMessage; - } - - public void setFailMessage(String failMessage) { - this.failMessage = failMessage; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPluginMetadata.java deleted file mode 100644 index 4a6ac83b6e..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/AbstractMetisPluginMetadata.java +++ /dev/null @@ -1,71 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeInfo.As; -import dev.morphia.annotations.Entity; -import eu.europeana.metis.utils.CommonStringValues; -import java.util.Date; - -/** - * This abstract class is the base implementation of {@link MetisPluginMetadata} and all other - * plugins should inherit from it. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-06-01 - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = As.EXISTING_PROPERTY, property = "pluginType") -@JsonSubTypes({ - @JsonSubTypes.Type(value = OaipmhHarvestPluginMetadata.class, name = "OAIPMH_HARVEST"), - @JsonSubTypes.Type(value = HTTPHarvestPluginMetadata.class, name = "HTTP_HARVEST"), - @JsonSubTypes.Type(value = ValidationExternalPluginMetadata.class, name = "VALIDATION_EXTERNAL"), - @JsonSubTypes.Type(value = TransformationPluginMetadata.class, name = "TRANSFORMATION"), - @JsonSubTypes.Type(value = ValidationInternalPluginMetadata.class, name = "VALIDATION_INTERNAL"), - @JsonSubTypes.Type(value = NormalizationPluginMetadata.class, name = "NORMALIZATION"), - @JsonSubTypes.Type(value = EnrichmentPluginMetadata.class, name = "ENRICHMENT"), - @JsonSubTypes.Type(value = MediaProcessPluginMetadata.class, name = "MEDIA_PROCESS"), - @JsonSubTypes.Type(value = LinkCheckingPluginMetadata.class, name = "LINK_CHECKING"), - @JsonSubTypes.Type(value = IndexToPreviewPluginMetadata.class, name = "PREVIEW"), - @JsonSubTypes.Type(value = IndexToPublishPluginMetadata.class, name = "PUBLISH") -}) -@Entity -public abstract class AbstractMetisPluginMetadata implements MetisPluginMetadata { - - private String revisionNamePreviousPlugin; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date revisionTimestampPreviousPlugin; - - public AbstractMetisPluginMetadata() { - } - - @Override - public String getRevisionNamePreviousPlugin() { - return revisionNamePreviousPlugin; - } - - public void setRevisionNamePreviousPlugin(String revisionNamePreviousPlugin) { - this.revisionNamePreviousPlugin = revisionNamePreviousPlugin; - } - - @Override - public Date getRevisionTimestampPreviousPlugin() { - return revisionTimestampPreviousPlugin == null ? 
null - : new Date(revisionTimestampPreviousPlugin.getTime()); - } - - public void setRevisionTimestampPreviousPlugin(Date revisionTimestampPreviousPlugin) { - this.revisionTimestampPreviousPlugin = revisionTimestampPreviousPlugin == null ? null - : new Date(revisionTimestampPreviousPlugin.getTime()); - } - - /** - * For the current plugin, setup the source/previous revision information. - * - * @param predecessor the predecessor plugin that the current plugin is based on. Is not null. - */ - public void setPreviousRevisionInformation(ExecutablePlugin predecessor) { - this.setRevisionNamePreviousPlugin(predecessor.getPluginType().name()); - this.setRevisionTimestampPreviousPlugin(predecessor.getStartedDate()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DataStatus.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DataStatus.java deleted file mode 100644 index 4f489bbb7f..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DataStatus.java +++ /dev/null @@ -1,35 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This enum lists the various states the result data of an executable plugin can be in when it - * comes to presence in eCloud. This does not attempt to say anything about the quality of the data - * (e.g. if it is complete or if there are errors). Furthermore, data here denotes not just the - * result of the plugin, but also all associated logs and error reports, etc. - */ -public enum DataStatus { - - /** - * The result data of this executable plugin is not yet present because the executable plugin has - * not (started to) run yet. - */ - NOT_YET_GENERATED, - - /** - * The result data of this executable plugin is available and can be used. 
- */ - VALID, - - /** - * The result data of this executable plugin is available, but it has been deprecated and should - * no longer be used for further processing (e.g. superseded by a non-executable plugin or an - * indexing removed from mongo/solr). - */ - DEPRECATED, - - /** - * The result data of this executable plugin is no longer available. It has been processed but the - * data has subsequently been removed and is not available for further processing. - */ - DELETED - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPlugin.java deleted file mode 100644 index bd2a19bfcc..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPlugin.java +++ /dev/null @@ -1,70 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import eu.europeana.metis.core.common.RecordIdUtils; -import java.util.HashMap; -import java.util.Map; -import org.springframework.util.CollectionUtils; - -/** - * Depublish Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2020-06-16 - */ -public class DepublishPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.DEPUBLISH.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - DepublishPlugin() { - //Required for json serialization - this(null); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - public DepublishPlugin(DepublishPluginMetadata pluginMetadata) { - super(PluginType.DEPUBLISH, pluginMetadata); - } - - /** - * Required for json serialization. - * - * @return the String representation of the topology - */ - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - public DpsTask prepareDpsTask(String datasetId, - DpsTaskSettings dpsTaskSettings) { - - Map extraParameters = new HashMap<>(); - extraParameters.put(PluginParameterKeys.METIS_DATASET_ID, datasetId); - //Do set the records ids parameter only if record ids depublication enabled and there are record ids - if (!getPluginMetadata().isDatasetDepublish()) { - if (CollectionUtils.isEmpty(getPluginMetadata().getRecordIdsToDepublish())) { - throw new IllegalStateException( - "Requested record depublication but there are no records ids for depublication in the db"); - } else { - final String recordIdList = String.join(",", RecordIdUtils - .composeFullRecordIds(datasetId, getPluginMetadata().getRecordIdsToDepublish())); - extraParameters.put(PluginParameterKeys.RECORD_IDS_TO_DEPUBLISH, recordIdList); - } - } - DpsTask dpsTask = new DpsTask(); - dpsTask.setParameters(extraParameters); - return dpsTask; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPluginMetadata.java deleted file mode 100644 index 0d615b393a..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DepublishPluginMetadata.java +++ /dev/null @@ -1,46 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - - -import java.util.Collections; -import java.util.HashSet; -import java.util.Optional; -import java.util.Set; - -/** - * Index to Publish Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2020-06-16 - */ -public class DepublishPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.DEPUBLISH; - private boolean datasetDepublish; - private Set recordIdsToDepublish; - - public DepublishPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public boolean isDatasetDepublish() { - return datasetDepublish; - } - - public void setDatasetDepublish(boolean datasetDepublish) { - this.datasetDepublish = datasetDepublish; - } - - public Set getRecordIdsToDepublish() { - return Optional.ofNullable(recordIdsToDepublish).map(Collections::unmodifiableSet) - .orElseGet(Collections::emptySet); - } - - public void setRecordIdsToDepublish(Set recordIdsToDepublish) { - this.recordIdsToDepublish = new HashSet<>(recordIdsToDepublish); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DpsTaskSettings.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DpsTaskSettings.java deleted file mode 100644 index 85fd0baf43..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/DpsTaskSettings.java +++ /dev/null @@ -1,63 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Class that contains basic parameters required for each {@link eu.europeana.cloud.service.dps.DpsTask} - * that is sent to ECloud. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-03-19 - */ -public class DpsTaskSettings { - - private final String ecloudBaseUrl; - private final String ecloudProvider; - private final String ecloudDatasetId; - private final String metisCoreBaseUrl; - private final String previousExternalTaskId; - private final ThrottlingValues throttlingValues; - - /** - * Constructor with all parameters. - * - * @param ecloudBaseUrl the base url endpoint to ecloud api - * @param ecloudProvider the ecloud provider to use - * @param ecloudDatasetId the ecloud dataset to use - * @param previousExternalTaskId the task identifier from the previous plugin execution. It is - * used to calculate faster the total records for the current execution on the ecloud side. Can be - * null if there is no previous task, like for example a harvesting plugin. - * @param metisCoreBaseUrl the base url of metis core application - */ - public DpsTaskSettings(String ecloudBaseUrl, String ecloudProvider, - String ecloudDatasetId, String previousExternalTaskId, String metisCoreBaseUrl, ThrottlingValues throttlingValues) { - this.ecloudBaseUrl = ecloudBaseUrl; - this.ecloudProvider = ecloudProvider; - this.ecloudDatasetId = ecloudDatasetId; - this.metisCoreBaseUrl = metisCoreBaseUrl; - this.previousExternalTaskId = previousExternalTaskId; - this.throttlingValues = throttlingValues; - } - - public String getEcloudBaseUrl() { - return ecloudBaseUrl; - } - - public String getEcloudProvider() { - return ecloudProvider; - } - - public String getEcloudDatasetId() { - return ecloudDatasetId; - } - - public String getPreviousExternalTaskId() { - return previousExternalTaskId; - } - - public String getMetisCoreBaseUrl() { - return metisCoreBaseUrl; - } - - public ThrottlingValues getThrottlingValues(){ - return throttlingValues; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPlugin.java 
b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPlugin.java deleted file mode 100644 index f2608ccadc..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPlugin.java +++ /dev/null @@ -1,43 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; - -/** - * Enrichment Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-26 - */ -public class EnrichmentPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.ENRICHMENT.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - EnrichmentPlugin() { - //Required for json serialization - super(PluginType.ENRICHMENT); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - EnrichmentPlugin(EnrichmentPluginMetadata pluginMetadata) { - super(PluginType.ENRICHMENT, pluginMetadata); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - return createDpsTaskForProcessPlugin(dpsTaskSettings, null); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPluginMetadata.java deleted file mode 100644 index 2f04d34586..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/EnrichmentPluginMetadata.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Enrichment Plugin Metadata. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -public class EnrichmentPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.ENRICHMENT; - - public EnrichmentPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePlugin.java deleted file mode 100644 index 4ce0118c42..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePlugin.java +++ /dev/null @@ -1,103 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.dps.TaskState; -import 
eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.UnrecoverableExternalTaskException; - -/** - * This interface represents plugins that are executable by Metis. - */ -public interface ExecutablePlugin extends MetisPlugin { - - /** - * The metadata corresponding to this plugin. - * - * @return {@link ExecutablePluginMetadata} - */ - @Override - ExecutablePluginMetadata getPluginMetadata(); - - /** - * @return String representation of the external task identifier of the execution - */ - String getExternalTaskId(); - - /** - * Progress information of the execution of the plugin - * - * @return {@link ExecutionProgress} - */ - ExecutionProgress getExecutionProgress(); - - /** - * It is required as an abstract method to have proper serialization on the api level. - * - * @return the topologyName string coming from {@link Topology} - */ - String getTopologyName(); - - /** - * Starts the execution of the plugin at the external location. - *

It is non blocking method and the {@link #monitor(DpsClient)} should be used to monitor the - * external execution

- * - * @param datasetId the dataset id that is required for some of the plugins - * @param dpsClient {@link DpsClient} used to submit the external execution - * @param dpsTaskSettings the basic parameter required for each execution - * @throws ExternalTaskException exceptions that encapsulates the external occurred exception - */ - void execute(String datasetId, DpsClient dpsClient, - DpsTaskSettings dpsTaskSettings) - throws ExternalTaskException; - - /** - * Request a monitor call to the external execution. This method also updates the execution - * progress statistics. - * - * @param dpsClient {@link DpsClient} used to request a monitor call the external execution - * @return {@link AbstractExecutablePlugin.MonitorResult} object containing the current state of - * the task. - * @throws ExternalTaskException exceptions that encapsulates the external occurred exception - */ - MonitorResult monitor(DpsClient dpsClient) throws ExternalTaskException, UnrecoverableExternalTaskException; - - /** - * Request a cancel call to the external execution. - * - * @param dpsClient {@link DpsClient} used to request a monitor call the external execution - * @param cancelledById the reason a task is being cancelled, is it a user identifier of a system - * identifier - * @throws ExternalTaskException exceptions that encapsulates the external occurred exception - */ - void cancel(DpsClient dpsClient, String cancelledById) throws ExternalTaskException; - - /** - * This object represents the result of a monitor call. It contains the information that - * monitoring processes need. - */ - class MonitorResult { - - private final TaskState taskState; - private final String taskInfo; - - /** - * Constructor. - * - * @param taskState The current state of the task. - * @param taskInfo The info message. Can be null or empty. 
- */ - public MonitorResult(TaskState taskState, String taskInfo) { - this.taskState = taskState; - this.taskInfo = taskInfo; - } - - public TaskState getTaskState() { - return taskState; - } - - public String getTaskInfo() { - return taskInfo; - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginFactory.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginFactory.java deleted file mode 100644 index b6940a8b90..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginFactory.java +++ /dev/null @@ -1,89 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import java.util.Collections; -import java.util.EnumMap; -import java.util.Map; -import java.util.function.Function; -import org.bson.types.ObjectId; - -/** - * This class implements the functionality of creating executable plugins, given a plugin metadata - * object. 
- */ -public final class ExecutablePluginFactory { - - private static final Map> pluginCreators; - - static { - final Map> creators = new EnumMap<>( - ExecutablePluginType.class); - creators.put(ExecutablePluginType.HTTP_HARVEST, - new PluginCreator<>(HTTPHarvestPluginMetadata.class, HTTPHarvestPlugin::new)); - creators.put(ExecutablePluginType.OAIPMH_HARVEST, - new PluginCreator<>(OaipmhHarvestPluginMetadata.class, OaipmhHarvestPlugin::new)); - creators.put(ExecutablePluginType.ENRICHMENT, - new PluginCreator<>(EnrichmentPluginMetadata.class, EnrichmentPlugin::new)); - creators.put(ExecutablePluginType.MEDIA_PROCESS, - new PluginCreator<>(MediaProcessPluginMetadata.class, MediaProcessPlugin::new)); - creators.put(ExecutablePluginType.LINK_CHECKING, - new PluginCreator<>(LinkCheckingPluginMetadata.class, LinkCheckingPlugin::new)); - creators.put(ExecutablePluginType.VALIDATION_EXTERNAL, - new PluginCreator<>(ValidationExternalPluginMetadata.class, ValidationExternalPlugin::new)); - creators.put(ExecutablePluginType.TRANSFORMATION, - new PluginCreator<>(TransformationPluginMetadata.class, TransformationPlugin::new)); - creators.put(ExecutablePluginType.VALIDATION_INTERNAL, - new PluginCreator<>(ValidationInternalPluginMetadata.class, ValidationInternalPlugin::new)); - creators.put(ExecutablePluginType.NORMALIZATION, - new PluginCreator<>(NormalizationPluginMetadata.class, NormalizationPlugin::new)); - creators.put(ExecutablePluginType.PREVIEW, - new PluginCreator<>(IndexToPreviewPluginMetadata.class, IndexToPreviewPlugin::new)); - creators.put(ExecutablePluginType.PUBLISH, - new PluginCreator<>(IndexToPublishPluginMetadata.class, IndexToPublishPlugin::new)); - creators.put(ExecutablePluginType.DEPUBLISH, - new PluginCreator<>(DepublishPluginMetadata.class, DepublishPlugin::new)); - pluginCreators = Collections.unmodifiableMap(creators); - } - - private ExecutablePluginFactory() { - } - - /** - * This method creates a new plugin for the provided plugin metadata. 
This method also sets the - * new plugins's id and data status. - * - * @param metadata The metadata for which to create a plugin. Cannot be null. - * @return A new plugin instance. - */ - public static AbstractExecutablePlugin createPlugin(ExecutablePluginMetadata metadata) { - - // Find the right creator. - final PluginCreator creator = pluginCreators.get(metadata.getExecutablePluginType()); - if (creator == null) { - throw new IllegalStateException( - "Found unknown executable plugin type: " + metadata.getExecutablePluginType()); - } - - // Perform the creation. - final AbstractExecutablePlugin plugin = creator.createPlugin(metadata); - plugin.setId(new ObjectId().toString() + "-" + plugin.getPluginType().name()); - plugin.setDataStatus(DataStatus.NOT_YET_GENERATED); - return plugin; - } - - private static class PluginCreator { - - private Class pluginMetadataType; - private Function> pluginCreatorFunction; - - PluginCreator(Class pluginMetadataType, - Function> pluginCreatorFunction) { - this.pluginMetadataType = pluginMetadataType; - this.pluginCreatorFunction = pluginCreatorFunction; - } - - AbstractExecutablePlugin createPlugin(MetisPluginMetadata pluginMetadata) { - final M castPluginMetadata = pluginMetadataType.cast(pluginMetadata); - return pluginCreatorFunction.apply(castPluginMetadata); - } - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginMetadata.java deleted file mode 100644 index 67b91179ad..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginMetadata.java +++ /dev/null @@ -1,12 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This interface represents plugin metadata that can be executed by Metis. 
- */ -public interface ExecutablePluginMetadata extends MetisPluginMetadata { - - ExecutablePluginType getExecutablePluginType(); - - boolean isEnabled(); - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginType.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginType.java deleted file mode 100644 index f451b1c945..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutablePluginType.java +++ /dev/null @@ -1,83 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * This denotes a plugin type that is executable (i.e. can be run by Metis). This is a subset of the - * list in {@link PluginType}, which contains all plugin types. - */ -public enum ExecutablePluginType { - - HTTP_HARVEST(PluginType.HTTP_HARVEST), - - OAIPMH_HARVEST(PluginType.OAIPMH_HARVEST), - - ENRICHMENT(PluginType.ENRICHMENT), - - MEDIA_PROCESS(PluginType.MEDIA_PROCESS), - - LINK_CHECKING(PluginType.LINK_CHECKING), - - VALIDATION_EXTERNAL(PluginType.VALIDATION_EXTERNAL), - - TRANSFORMATION(PluginType.TRANSFORMATION), - - VALIDATION_INTERNAL(PluginType.VALIDATION_INTERNAL), - - NORMALIZATION(PluginType.NORMALIZATION), - - PREVIEW(PluginType.PREVIEW), - - PUBLISH(PluginType.PUBLISH), - - DEPUBLISH(PluginType.DEPUBLISH); - - private final PluginType pluginType; - - ExecutablePluginType(PluginType pluginType) { - this.pluginType = pluginType; - } - - /** - * @return the corresponding instance of {@link PluginType}. 
- */ - public PluginType toPluginType() { - return pluginType; - } - - /** - * Get the corresponding {@link ExecutablePluginType} by providing a {@link PluginType} or null if - * no match found - * - * @param pluginType the provided plugin type - * @return the executable plugin type or null if no match found - */ - public static ExecutablePluginType getExecutablePluginFromPluginType(PluginType pluginType) { - for (ExecutablePluginType executablePluginType : values()) { - if (executablePluginType.pluginType == pluginType) { - return executablePluginType; - } - } - return null; - } - - /** - * Lookup of a {@link ExecutablePluginType} enum from a provided enum String representation of the - * enum value. - * - * @param enumName the String representation of an enum value - * @return the {@link ExecutablePluginType} that represents the provided value or null if not - * found - */ - @JsonCreator - public static ExecutablePluginType getPluginTypeFromEnumName( - @JsonProperty("pluginName") String enumName) { - for (ExecutablePluginType pluginType : values()) { - if (pluginType.name().equalsIgnoreCase(enumName)) { - return pluginType; - } - } - return null; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutionProgress.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutionProgress.java deleted file mode 100644 index 7c5269b8f6..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ExecutionProgress.java +++ /dev/null @@ -1,108 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import dev.morphia.annotations.Entity; -import eu.europeana.cloud.common.model.dps.TaskState; - -/** - * Contains execution progress information of a task. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-06-01 - */ -@Entity -public class ExecutionProgress { - - // The total number of expected records excluding deleted records. - private int expectedRecords; - - // The total number of records processed so far excluding deleted records and including ignored records if applicable. - private int processedRecords; - - // The percentage: the division of the actual and expected number of processed records. - private int progressPercentage; - - // The number of processed records so far that are to be ignored for follow-up tasks. - private int ignoredRecords = 0; - - // The number of deleted records processed so far. - private int deletedRecords = 0; - - // The number of errors encountered so far. - private int errors; - - // The current state of the task. - private TaskState status; - - // TODO: 01/11/2021 The correct values should be updated with a script for the latest preview and publish executions, during release - // The total records in the database, not used to capture progress but the final result(post process check) - private int totalDatabaseRecords = -1; - - public int getExpectedRecords() { - return expectedRecords; - } - - public void setExpectedRecords(int expectedRecords) { - this.expectedRecords = expectedRecords; - } - - public int getProcessedRecords() { - return processedRecords; - } - - public void setProcessedRecords(int processedRecords) { - this.processedRecords = processedRecords; - } - - public int getProgressPercentage() { - return progressPercentage; - } - - public void setProgressPercentage(int progressPercentage) { - this.progressPercentage = progressPercentage; - } - - public int getIgnoredRecords() { - return ignoredRecords; - } - - public void setIgnoredRecords(int ignoredRecords) { - this.ignoredRecords = ignoredRecords; - } - - public int getDeletedRecords() { - return deletedRecords; - } - - public void setDeletedRecords(int deletedRecords) { - 
this.deletedRecords = deletedRecords; - } - - public int getErrors() { - return errors; - } - - public void setErrors(int errors) { - this.errors = errors; - } - - public TaskState getStatus() { - return status; - } - - public void setStatus(TaskState status) { - this.status = status; - } - - public int getTotalDatabaseRecords() { - return totalDatabaseRecords; - } - - public void setTotalDatabaseRecords(int totalDatabaseRecords) { - this.totalDatabaseRecords = totalDatabaseRecords; - } - - public void recalculateProgressPercentage() { - this.progressPercentage = this.expectedRecords == 0 ? 0 - : (int) Math.round(100.0 * (this.processedRecords + this.deletedRecords)/ (this.expectedRecords + this.deletedRecords)); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPlugin.java deleted file mode 100644 index 3693698b9c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPlugin.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import java.util.HashMap; -import java.util.Map; - -/** - * HTTP Harvest Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-24 - */ -public class HTTPHarvestPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.HTTP_HARVEST.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - HTTPHarvestPlugin() { - // Required for json serialization - super(PluginType.HTTP_HARVEST); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

- * Initializes the {@link #pluginType} as well. - *

- * - * @param pluginMetadata The plugin metadata. - */ - HTTPHarvestPlugin(HTTPHarvestPluginMetadata pluginMetadata) { - super(PluginType.HTTP_HARVEST, pluginMetadata); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - String targetUrl = getPluginMetadata().getUrl(); - Map parameters = new HashMap<>(); - parameters.put(PluginParameterKeys.METIS_DATASET_ID, datasetId); - return createDpsTaskForHarvestPlugin(dpsTaskSettings, parameters, targetUrl, - getPluginMetadata().isIncrementalHarvest()); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPluginMetadata.java deleted file mode 100644 index 67c5a50a00..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/HTTPHarvestPluginMetadata.java +++ /dev/null @@ -1,59 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * HTTP Harvest Plugin Metadata. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -public class HTTPHarvestPluginMetadata extends AbstractHarvestPluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.HTTP_HARVEST; - private String url; - private String user; - private String password; - private boolean incrementalHarvest; // Default: false (i.e. 
full harvest) - - public HTTPHarvestPluginMetadata() { - //Required for json serialization - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - @Override - public boolean isIncrementalHarvest() { - return incrementalHarvest; - } - - public void setIncrementalHarvest(boolean incrementalHarvest) { - this.incrementalHarvest = incrementalHarvest; - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPlugin.java deleted file mode 100644 index 2c918843b6..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPlugin.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.metis.indexing.TargetIndexingDatabase; - -/** - * Index to Preview Plugin. - * Note: Adding another layer of hierarchy e.g. AbstractIndexPlugin seems to not work with morphia at this point in time 18/11/2021 - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-03 - */ -public class IndexToPreviewPlugin extends AbstractExecutablePlugin { - protected final String topologyName = Topology.INDEX.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. 
- */ - IndexToPreviewPlugin() { - //Required for json serialization - this(null); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - IndexToPreviewPlugin(IndexToPreviewPluginMetadata pluginMetadata) { - super(PluginType.PREVIEW, pluginMetadata); - } - - @Override - public DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - return createDpsTaskForIndexPlugin(dpsTaskSettings, datasetId, - getPluginMetadata().isIncrementalIndexing(), - getPluginMetadata().getHarvestDate(), - getPluginMetadata().isPreserveTimestamps(), - getPluginMetadata().getDatasetIdsToRedirectFrom(), - getPluginMetadata().isPerformRedirects(), getTargetIndexingDatabase().name()); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - /** - * Get the target indexing database. - * - * @return the target indexing database - */ - public TargetIndexingDatabase getTargetIndexingDatabase() { - return TargetIndexingDatabase.PREVIEW; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPluginMetadata.java deleted file mode 100644 index 72db3b4675..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPreviewPluginMetadata.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Index to Preview Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-03 - */ -public class IndexToPreviewPluginMetadata extends AbstractIndexPluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.PREVIEW; - - public IndexToPreviewPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPlugin.java deleted file mode 100644 index 7a31c186e4..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPlugin.java +++ /dev/null @@ -1,57 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.metis.indexing.TargetIndexingDatabase; - -/** - * Index to Publish Plugin. - * Note: Adding another layer of hierarchy e.g. AbstractIndexPlugin seems to not work with morphia at this point in time 18/11/2021 - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-03 - */ -public class IndexToPublishPlugin extends AbstractExecutablePlugin { - protected final String topologyName = Topology.INDEX.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the plugin. - */ - IndexToPublishPlugin() { - //Required for json serialization - this(null); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - IndexToPublishPlugin(IndexToPublishPluginMetadata pluginMetadata) { - super(PluginType.PUBLISH, pluginMetadata); - } - - @Override - public DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - return createDpsTaskForIndexPlugin(dpsTaskSettings, datasetId, - getPluginMetadata().isIncrementalIndexing(), - getPluginMetadata().getHarvestDate(), - getPluginMetadata().isPreserveTimestamps(), - getPluginMetadata().getDatasetIdsToRedirectFrom(), - getPluginMetadata().isPerformRedirects(), getTargetIndexingDatabase().name()); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - /** - * Get the target indexing database. - * - * @return the target indexing database - */ - public TargetIndexingDatabase getTargetIndexingDatabase() { - return TargetIndexingDatabase.PUBLISH; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPluginMetadata.java deleted file mode 100644 index d321e68055..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/IndexToPublishPluginMetadata.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Index to Publish Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-03 - */ -public class IndexToPublishPluginMetadata extends AbstractIndexPluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.PUBLISH; - - public IndexToPublishPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPlugin.java deleted file mode 100644 index 71e344b533..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPlugin.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import java.util.HashMap; -import java.util.Map; - -/** - * Link Checking Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-05-16 - */ -public class LinkCheckingPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.LINK_CHECKING.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - LinkCheckingPlugin() { - //Required for json serialization - super(PluginType.LINK_CHECKING); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - LinkCheckingPlugin(LinkCheckingPluginMetadata pluginMetadata) { - super(PluginType.LINK_CHECKING, pluginMetadata); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - final Map extraParameters = new HashMap<>(); - if (Boolean.TRUE.equals(getPluginMetadata().getPerformSampling()) - && getPluginMetadata().getSampleSize() != null) { - extraParameters - .put(PluginParameterKeys.SAMPLE_SIZE, getPluginMetadata().getSampleSize().toString()); - } - return createDpsTaskForProcessPlugin(dpsTaskSettings, extraParameters); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPluginMetadata.java deleted file mode 100644 index 26f34f1d78..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/LinkCheckingPluginMetadata.java +++ /dev/null @@ -1,40 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Link Checking Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-05-16 - */ -public class LinkCheckingPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.LINK_CHECKING; - - private Boolean performSampling; - private Integer sampleSize; - - public LinkCheckingPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public void setPerformSampling(Boolean performSampling) { - this.performSampling = performSampling; - } - - public Integer getSampleSize() { - return sampleSize; - } - - public void setSampleSize(Integer sampleSize) { - this.sampleSize = sampleSize; - } - - public Boolean getPerformSampling() { - return performSampling; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPlugin.java deleted file mode 100644 index 0ccaaac080..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPlugin.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.PluginParameterKeys; - -import java.util.Map; - -/** - * Media Process Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-20 - */ -public class MediaProcessPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.MEDIA_PROCESS.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. 
- */ - MediaProcessPlugin() { - //Required for json serialization - super(PluginType.MEDIA_PROCESS); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - MediaProcessPlugin(MediaProcessPluginMetadata pluginMetadata) { - super(PluginType.MEDIA_PROCESS, pluginMetadata); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, - DpsTaskSettings dpsTaskSettings) { - ThrottlingLevel throttlingLevel = getPluginMetadata().getThrottlingLevel() == null ? - ThrottlingLevel.WEAK : getPluginMetadata().getThrottlingLevel(); - - return createDpsTaskForProcessPlugin(dpsTaskSettings, Map.of(PluginParameterKeys.MAXIMUM_PARALLELIZATION, - String.valueOf(dpsTaskSettings.getThrottlingValues().getThreadNumberFromThrottlingLevel(throttlingLevel)))); - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPluginMetadata.java deleted file mode 100644 index 60b79b0ff4..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MediaProcessPluginMetadata.java +++ /dev/null @@ -1,31 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Media Process Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-20 - */ -public class MediaProcessPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.MEDIA_PROCESS; - private ThrottlingLevel throttlingLevel; - - - public MediaProcessPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public ThrottlingLevel getThrottlingLevel() { - return throttlingLevel; - } - - public void setThrottlingLevel(ThrottlingLevel throttlingLevel) { - this.throttlingLevel = throttlingLevel; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPlugin.java deleted file mode 100644 index fb475107e9..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPlugin.java +++ /dev/null @@ -1,69 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import java.util.Date; -import java.util.Optional; - -/** - * This interface represents a plugin. It contains the minimum a plugin should support so that it - * can be plugged in the Metis workflow registry and can be accessible via the REST API of Metis. - */ -public interface MetisPlugin { - - String REPRESENTATION_NAME = "metadataRecord"; - - static String getRepresentationName() { - return REPRESENTATION_NAME; - } - - String getId(); - - /** - * @return {@link PluginType} - */ - PluginType getPluginType(); - - /** - * The metadata corresponding to this plugin. 
- * - * @return {@link MetisPluginMetadata} - */ - MetisPluginMetadata getPluginMetadata(); - - /** - * @return started {@link Date} of the execution of the plugin - */ - Date getStartedDate(); - - /** - * @return updated {@link Date} of the execution of the plugin - */ - Date getUpdatedDate(); - - /** - * @return finished {@link Date} of the execution of the plugin - */ - Date getFinishedDate(); - - /** - * @return status {@link PluginStatus} of the execution of the plugin - */ - PluginStatus getPluginStatus(); - - String getFailMessage(); - - /** - * @return The data status of this plugin. If null, this should be interpreted as being equal to - * {@link DataStatus#VALID} (due to backwards-compatibility). - */ - DataStatus getDataStatus(); - - /** - * Returns the data state for the plugin taking into account the default value. - * - * @param plugin The plugin. - * @return The data status of the given plugin. Is not null. - */ - static DataStatus getDataStatus(ExecutablePlugin plugin) { - return Optional.ofNullable(plugin.getDataStatus()).orElse(DataStatus.VALID); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPluginMetadata.java deleted file mode 100644 index 5f0ef9f93d..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/MetisPluginMetadata.java +++ /dev/null @@ -1,16 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import java.util.Date; - -/** - * This interface represents plugin metadata - */ -public interface MetisPluginMetadata { - - PluginType getPluginType(); - - String getRevisionNamePreviousPlugin(); - - Date getRevisionTimestampPreviousPlugin(); - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPlugin.java 
b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPlugin.java deleted file mode 100644 index 127c43f2bb..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPlugin.java +++ /dev/null @@ -1,44 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; - -/** - * Normalization Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-26 - */ -public class NormalizationPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.NORMALIZATION.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - NormalizationPlugin() { - //Required for json serialization - super(PluginType.NORMALIZATION); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - NormalizationPlugin(NormalizationPluginMetadata pluginMetadata) { - super(PluginType.NORMALIZATION, pluginMetadata); - } - - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - return createDpsTaskForProcessPlugin(dpsTaskSettings, null); - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPluginMetadata.java deleted file mode 100644 index 05a612e32f..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/NormalizationPluginMetadata.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Normalization Plugin Metadata. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-26 - */ -public class NormalizationPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.NORMALIZATION; - - public NormalizationPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPlugin.java deleted file mode 100644 index 12a97ac37a..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPlugin.java +++ /dev/null @@ -1,76 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import 
eu.europeana.cloud.service.dps.OAIPMHHarvestingDetails; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import org.apache.commons.lang3.StringUtils; - -/** - * OAIPMH Harvest Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-24 - */ -public class OaipmhHarvestPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.OAIPMH_HARVEST.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - public OaipmhHarvestPlugin() { - //Required for json serialization - super(PluginType.OAIPMH_HARVEST); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - OaipmhHarvestPlugin(OaipmhHarvestPluginMetadata pluginMetadata) { - super(PluginType.OAIPMH_HARVEST, pluginMetadata); - } - - /** - * Required for json serialization. - * - * @return the String representation of the topology - */ - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - String targetUrl = getPluginMetadata().getUrl(); - Map parameters = new HashMap<>(); - parameters.put(PluginParameterKeys.METIS_DATASET_ID, datasetId); - DpsTask dpsTask = createDpsTaskForHarvestPlugin(dpsTaskSettings, parameters, - targetUrl, getPluginMetadata().isIncrementalHarvest()); - - String setSpec = getPluginMetadata().getSetSpec(); - String metadataFormat = getPluginMetadata().getMetadataFormat(); - Date fromDate = getPluginMetadata().getFromDate(); - Date untilDate = getPluginMetadata().getUntilDate(); - - OAIPMHHarvestingDetails oaipmhHarvestingDetails = new OAIPMHHarvestingDetails(); - if (StringUtils.isNotEmpty(metadataFormat)) { - oaipmhHarvestingDetails.setSchema(metadataFormat); - } - if (StringUtils.isNotEmpty(setSpec)) { - oaipmhHarvestingDetails.setSet(setSpec); - } - oaipmhHarvestingDetails.setDateFrom(fromDate); - oaipmhHarvestingDetails.setDateUntil(untilDate); - dpsTask.setHarvestingDetails(oaipmhHarvestingDetails); - - return dpsTask; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPluginMetadata.java deleted file mode 100644 index d407464f97..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/OaipmhHarvestPluginMetadata.java +++ /dev/null @@ -1,93 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import 
com.fasterxml.jackson.annotation.JsonFormat; -import eu.europeana.metis.utils.CommonStringValues; -import java.util.Date; - -/** - * OAIPMH Harvest Plugin Metadata. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -public class OaipmhHarvestPluginMetadata extends AbstractHarvestPluginMetadata { - - private static final ExecutablePluginType PLUGIN_TYPE = ExecutablePluginType.OAIPMH_HARVEST; - private String url; - private String metadataFormat; - private String setSpec; - private boolean incrementalHarvest; // Default: false (i.e. full harvest) - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date fromDate; - @JsonFormat(pattern = CommonStringValues.DATE_FORMAT) - private Date untilDate; - //If useDefaultIdentifiers == true then this is the prefix to be trimmed from the OAI Header Identifier - private String identifierPrefixRemoval; - - public OaipmhHarvestPluginMetadata() { - //Required for json serialization - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getMetadataFormat() { - return metadataFormat; - } - - public void setMetadataFormat(String metadataFormat) { - this.metadataFormat = metadataFormat; - } - - public String getSetSpec() { - return setSpec; - } - - public void setSetSpec(String setSpec) { - this.setSpec = setSpec; - } - - public void setIncrementalHarvest(boolean incrementalHarvest) { - this.incrementalHarvest = incrementalHarvest; - } - - @Override - public boolean isIncrementalHarvest() { - return incrementalHarvest; - } - - public Date getFromDate() { - return fromDate == null ? null : new Date(fromDate.getTime()); - } - - public void setFromDate(Date fromDate) { - this.fromDate = fromDate == null ? 
null : new Date(fromDate.getTime()); - } - - public String getIdentifierPrefixRemoval() { - return identifierPrefixRemoval; - } - - public void setIdentifierPrefixRemoval(String identifierPrefixRemoval) { - this.identifierPrefixRemoval = identifierPrefixRemoval; - } - - public Date getUntilDate() { - return untilDate == null ? null : new Date(untilDate.getTime()); - } - - public void setUntilDate(Date untilDate) { - this.untilDate = untilDate == null ? null : new Date(untilDate.getTime()); - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return PLUGIN_TYPE; - } - -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginStatus.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginStatus.java deleted file mode 100644 index 481cdf4067..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginStatus.java +++ /dev/null @@ -1,30 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import com.fasterxml.jackson.annotation.JsonCreator; - -/** - * The status that a plugin can have. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-26 - */ -public enum PluginStatus { - INQUEUE, CLEANING, RUNNING, FINISHED, CANCELLED, FAILED, PENDING, IDENTIFYING_DELETED_RECORDS; - - /** - * Lookup of a {@link PluginStatus} enum from a provided enum String representation of the enum - * value. 
- * - * @param enumName the String representation of an enum value - * @return the {@link PluginStatus} that represents the provided value or null if not found - */ - @JsonCreator - public static PluginStatus getPluginStatusFromEnumName(String enumName) { - for (PluginStatus pluginStatus : PluginStatus.values()) { - if (pluginStatus.name().equalsIgnoreCase(enumName)) { - return pluginStatus; - } - } - return null; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginType.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginType.java deleted file mode 100644 index b690d214d1..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/PluginType.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * Contains all Plugin types. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-24 - */ -public enum PluginType { - - HTTP_HARVEST, - - OAIPMH_HARVEST, - - ENRICHMENT, - - MEDIA_PROCESS, - - LINK_CHECKING, - - VALIDATION_EXTERNAL, - - TRANSFORMATION, - - VALIDATION_INTERNAL, - - NORMALIZATION, - - PREVIEW, - - PUBLISH, - - DEPUBLISH, - - REINDEX_TO_PREVIEW, - - REINDEX_TO_PUBLISH; - - /** - * Lookup of a {@link PluginType} enum from a provided enum String representation of the enum - * value. 
- * - * @param enumName the String representation of an enum value - * @return the {@link PluginType} that represents the provided value or null if not found - */ - @JsonCreator - public static PluginType getPluginTypeFromEnumName(@JsonProperty("pluginName") String enumName) { - for (PluginType pluginType : PluginType.values()) { - if (pluginType.name().equalsIgnoreCase(enumName)) { - return pluginType; - } - } - return null; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPlugin.java deleted file mode 100644 index 9525fc9546..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPlugin.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This represents a reindex to preview (which is not a executable plugin type). - */ -public class ReindexToPreviewPlugin extends AbstractMetisPlugin { - - ReindexToPreviewPlugin() { - this(null); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - public ReindexToPreviewPlugin(ReindexToPreviewPluginMetadata pluginMetadata) { - super(PluginType.REINDEX_TO_PREVIEW, pluginMetadata); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPluginMetadata.java deleted file mode 100644 index b1f2a8b12b..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPreviewPluginMetadata.java +++ /dev/null @@ -1,12 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This metadata applies to a reindex to preview (which is not a executable plugin type). - */ -public class ReindexToPreviewPluginMetadata extends AbstractMetisPluginMetadata { - - @Override - public PluginType getPluginType() { - return PluginType.REINDEX_TO_PREVIEW; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPlugin.java deleted file mode 100644 index 1453555168..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPlugin.java +++ /dev/null @@ -1,21 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This represents a reindex to publish (which is not a executable plugin type). - */ -public class ReindexToPublishPlugin extends AbstractMetisPlugin { - - ReindexToPublishPlugin() { - this(null); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - public ReindexToPublishPlugin(ReindexToPublishPluginMetadata pluginMetadata) { - super(PluginType.REINDEX_TO_PUBLISH, pluginMetadata); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPluginMetadata.java deleted file mode 100644 index 3f61bf015c..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ReindexToPublishPluginMetadata.java +++ /dev/null @@ -1,12 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This metadata applies to a reindex to publish (which is not a executable plugin type). - */ -public class ReindexToPublishPluginMetadata extends AbstractMetisPluginMetadata { - - @Override - public PluginType getPluginType() { - return PluginType.REINDEX_TO_PUBLISH; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingLevel.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingLevel.java deleted file mode 100644 index 789d2a6571..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingLevel.java +++ /dev/null @@ -1,10 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * This enum lists the various types of levels for setting up the throttling level - */ -public enum ThrottlingLevel { - WEAK, - MEDIUM, - STRONG -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingValues.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingValues.java deleted file mode 100644 index a7726ee528..0000000000 --- 
a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ThrottlingValues.java +++ /dev/null @@ -1,62 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Class encapsulating all possible throttling levels tuples - * - * @author Joana Sousa (joana.sousa@europeana.eu) - * @since 2022-08-12 - */ -public class ThrottlingValues { - - private final int weak; - private final int medium; - private final int strong; - - /** - * Constructor - * - * @param weak The throttling details to represent level weak - * @param medium The throttling details to represent level medium - * @param strong The throttling details to represent level strong - */ - public ThrottlingValues(int weak, int medium, int strong) { - this.weak = weak; - this.medium = medium; - this.strong = strong; - } - - /** - * Return the details related to weak throttling level - * @return The details about throttling level weak - */ - public int getWeak() { - return weak; - } - - /** - * Return the details related to medium throttling level - * @return The details about throttling level medium - */ - public int getMedium() { - return medium; - } - - /** - * Return the details related to strong throttling level - * @return The details about throttling level strong - */ - public int getStrong() { - return strong; - } - - public int getThreadNumberFromThrottlingLevel(ThrottlingLevel throttlingLevel){ - int result; - switch (throttlingLevel) { - case MEDIUM -> result = medium; - case STRONG -> result = strong; - default -> result = weak; - } - - return result; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/Topology.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/Topology.java deleted file mode 100644 index 40c248fad9..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/Topology.java +++ /dev/null @@ -1,39 +0,0 @@ -package 
eu.europeana.metis.core.workflow.plugins; - -/** - * Contains all topology names. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-12 - */ -public enum Topology { - - HTTP_HARVEST("http_harvest"), - OAIPMH_HARVEST("oai_harvest"), - - VALIDATION("validation"), - - TRANSFORMATION("xslt_transform"), - - NORMALIZATION("normalization"), - - ENRICHMENT("enrichment"), - - MEDIA_PROCESS("media_process"), - - LINK_CHECKING("link_checker"), - - INDEX("indexer"), - - DEPUBLISH("depublication"); - - private final String topologyName; - - Topology(String topologyName) { - this.topologyName = topologyName; - } - - public String getTopologyName() { - return topologyName; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPlugin.java deleted file mode 100644 index b3d8b86bac..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPlugin.java +++ /dev/null @@ -1,66 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import eu.europeana.cloud.service.dps.PluginParameterKeys; -import eu.europeana.metis.utils.RestEndpoints; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * Transformation Plugin. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-29 - */ -public class TransformationPlugin extends AbstractExecutablePlugin { - - private final String topologyName = Topology.TRANSFORMATION.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - TransformationPlugin() { - //Required for json serialization - super(PluginType.TRANSFORMATION); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - TransformationPlugin(TransformationPluginMetadata pluginMetadata) { - super(PluginType.TRANSFORMATION, pluginMetadata); - } - - /** - * Required for json serialization. - * - * @return the String representation of the topology - */ - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - public DpsTask prepareDpsTask(String datasetId, - DpsTaskSettings dpsTaskSettings) { - Map extraParameters = new HashMap<>(); - extraParameters.put(PluginParameterKeys.XSLT_URL, - dpsTaskSettings.getMetisCoreBaseUrl() + RestEndpoints - .resolve(RestEndpoints.DATASETS_XSLT_XSLTID, - Collections.singletonList(getPluginMetadata().getXsltId()))); - extraParameters.put(PluginParameterKeys.METIS_DATASET_ID, datasetId); - extraParameters - .put(PluginParameterKeys.METIS_DATASET_NAME, getPluginMetadata().getDatasetName()); - extraParameters - .put(PluginParameterKeys.METIS_DATASET_COUNTRY, getPluginMetadata().getCountry()); - extraParameters - .put(PluginParameterKeys.METIS_DATASET_LANGUAGE, getPluginMetadata().getLanguage()); - return createDpsTaskForProcessPlugin(dpsTaskSettings, extraParameters); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPluginMetadata.java deleted file mode 100644 index ca3796a7c5..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/TransformationPluginMetadata.java +++ /dev/null @@ -1,66 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Transformation Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-29 - */ -public class TransformationPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.TRANSFORMATION; - private String xsltId; - private boolean customXslt; - private String datasetName; - private String country; - private String language; - - public TransformationPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public boolean isCustomXslt() { - return customXslt; - } - - public void setCustomXslt(boolean customXslt) { - this.customXslt = customXslt; - } - - public String getXsltId() { - return xsltId; - } - - public void setXsltId(String xsltId) { - this.xsltId = xsltId; - } - - public String getDatasetName() { - return datasetName; - } - - public void setDatasetName(String datasetName) { - this.datasetName = datasetName; - } - - public String getCountry() { - return country; - } - - public void setCountry(String country) { - this.country = country; - } - - public String getLanguage() { - return language; - } - - public void setLanguage(String language) { - this.language = language; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPlugin.java deleted file mode 100644 index 76d69ccb74..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPlugin.java +++ /dev/null @@ -1,56 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import java.util.Map; - -/** - * Validation External Plugin. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-24 - */ -public class ValidationExternalPlugin extends - AbstractExecutablePlugin { - - private final String topologyName = Topology.VALIDATION.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - ValidationExternalPlugin() { - //Required for json serialization - super(PluginType.VALIDATION_EXTERNAL); - - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - ValidationExternalPlugin(ValidationExternalPluginMetadata pluginMetadata) { - super(PluginType.VALIDATION_EXTERNAL, pluginMetadata); - } - - /** - * Required for json serialization. - * - * @return the String representation of the topology - */ - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - String urlOfSchemasZip = getPluginMetadata().getUrlOfSchemasZip(); - String schemaRootPath = getPluginMetadata().getSchemaRootPath(); - String schematronRootPath = getPluginMetadata().getSchematronRootPath(); - Map extraParameters = createParametersForValidationExternal(urlOfSchemasZip, - schemaRootPath, schematronRootPath); - return createDpsTaskForProcessPlugin(dpsTaskSettings, extraParameters); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPluginMetadata.java deleted file mode 100644 index e4cc2899e1..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationExternalPluginMetadata.java +++ /dev/null @@ -1,48 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Validation External Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -public class ValidationExternalPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.VALIDATION_EXTERNAL; - private String urlOfSchemasZip; - private String schemaRootPath; - private String schematronRootPath; - - public ValidationExternalPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public String getUrlOfSchemasZip() { - return urlOfSchemasZip; - } - - public void setUrlOfSchemasZip(String urlOfSchemasZip) { - this.urlOfSchemasZip = urlOfSchemasZip; - } - - public String getSchemaRootPath() { - return schemaRootPath; - } - - public void setSchemaRootPath(String schemaRootPath) { - this.schemaRootPath = schemaRootPath; - } - - public String getSchematronRootPath() { - return schematronRootPath; - } - - public void setSchematronRootPath(String schematronRootPath) { - this.schematronRootPath = schematronRootPath; - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPlugin.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPlugin.java deleted file mode 100644 index cbd5badd84..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPlugin.java +++ /dev/null @@ -1,55 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -import eu.europeana.cloud.service.dps.DpsTask; -import java.util.Map; - -/** - * Validation Internal Plugin. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-29 - */ -public class ValidationInternalPlugin extends - AbstractExecutablePlugin { - - private final String topologyName = Topology.VALIDATION.getTopologyName(); - - /** - * Zero argument constructor that initializes the {@link #pluginType} corresponding to the - * plugin. - */ - ValidationInternalPlugin() { - //Required for json serialization - super(PluginType.VALIDATION_INTERNAL); - } - - /** - * Constructor to initialize the plugin with pluginMetadata. - *

Initializes the {@link #pluginType} as well.

- * - * @param pluginMetadata The plugin metadata. - */ - ValidationInternalPlugin(ValidationInternalPluginMetadata pluginMetadata) { - super(PluginType.VALIDATION_INTERNAL, pluginMetadata); - } - - /** - * Required for json serialization. - * - * @return the String representation of the topology - */ - @Override - public String getTopologyName() { - return topologyName; - } - - @Override - DpsTask prepareDpsTask(String datasetId, DpsTaskSettings dpsTaskSettings) { - String urlOfSchemasZip = getPluginMetadata().getUrlOfSchemasZip(); - String schemaRootPath = getPluginMetadata().getSchemaRootPath(); - String schematronRootPath = getPluginMetadata().getSchematronRootPath(); - Map extraParameters = createParametersForValidationInternal(urlOfSchemasZip, - schemaRootPath, schematronRootPath); - return createDpsTaskForProcessPlugin(dpsTaskSettings, extraParameters); - } -} diff --git a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPluginMetadata.java b/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPluginMetadata.java deleted file mode 100644 index 48b744582a..0000000000 --- a/metis-core/metis-core-common/src/main/java/eu/europeana/metis/core/workflow/plugins/ValidationInternalPluginMetadata.java +++ /dev/null @@ -1,48 +0,0 @@ -package eu.europeana.metis.core.workflow.plugins; - -/** - * Validation Internal Plugin Metadata. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-01-29 - */ -public class ValidationInternalPluginMetadata extends AbstractExecutablePluginMetadata { - - private static final ExecutablePluginType pluginType = ExecutablePluginType.VALIDATION_INTERNAL; - private String urlOfSchemasZip; - private String schemaRootPath; - private String schematronRootPath; - - public ValidationInternalPluginMetadata() { - //Required for json serialization - } - - @Override - public ExecutablePluginType getExecutablePluginType() { - return pluginType; - } - - public String getUrlOfSchemasZip() { - return urlOfSchemasZip; - } - - public void setUrlOfSchemasZip(String urlOfSchemasZip) { - this.urlOfSchemasZip = urlOfSchemasZip; - } - - public String getSchemaRootPath() { - return schemaRootPath; - } - - public void setSchemaRootPath(String schemaRootPath) { - this.schemaRootPath = schemaRootPath; - } - - public String getSchematronRootPath() { - return schematronRootPath; - } - - public void setSchematronRootPath(String schematronRootPath) { - this.schematronRootPath = schematronRootPath; - } -} diff --git a/metis-core/metis-core-common/src/main/resources/log4j2.xml b/metis-core/metis-core-common/src/main/resources/log4j2.xml deleted file mode 100644 index 6d088df2ea..0000000000 --- a/metis-core/metis-core-common/src/main/resources/log4j2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TestRecordIdUtils.java b/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TestRecordIdUtils.java deleted file mode 100644 index 5717998416..0000000000 --- a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TestRecordIdUtils.java +++ /dev/null @@ -1,96 +0,0 @@ -package eu.europeana.metis.core.common; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static 
org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import eu.europeana.metis.exception.BadContentException; -import java.util.Optional; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.Test; - -class TestRecordIdUtils { - - @Test - void testDecomposeFullRecordId() { - - // Good input - assertEquals(new ImmutablePair<>("1", "A"), - RecordIdUtils.decomposeFullRecordId("/1/A")); - assertEquals(new ImmutablePair<>("123", "ABC"), - RecordIdUtils.decomposeFullRecordId("/123/ABC")); - - // Bad input - assertNull(RecordIdUtils.decomposeFullRecordId("//")); - assertNull(RecordIdUtils.decomposeFullRecordId("/1/")); - assertNull(RecordIdUtils.decomposeFullRecordId("//A")); - assertNull(RecordIdUtils.decomposeFullRecordId("//1/A")); - assertNull(RecordIdUtils.decomposeFullRecordId("/1//A")); - assertNull(RecordIdUtils.decomposeFullRecordId("1/A")); - assertNull(RecordIdUtils.decomposeFullRecordId("1A")); - assertNull(RecordIdUtils.decomposeFullRecordId(" /1/A")); - assertNull(RecordIdUtils.decomposeFullRecordId("/1/A ")); - assertNull(RecordIdUtils.decomposeFullRecordId("/ 1/A")); - assertNull(RecordIdUtils.decomposeFullRecordId("/1 /A")); - assertNull(RecordIdUtils.decomposeFullRecordId("/1/ A")); - } - - @Test - void testCheckAndNormalizeRecordId() throws BadContentException { - - // Empty record IDs - assertFalse(RecordIdUtils.checkAndNormalizeRecordId("dataset1", "").isPresent()); - assertFalse(RecordIdUtils.checkAndNormalizeRecordId("dataset1", " ").isPresent()); - - // Simple IDs with and without spaces - assertEquals(Optional.of("id1"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "id1")); - assertEquals(Optional.of("id2"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", " id2")); - assertEquals(Optional.of("id3"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "id3 ")); - 
assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "id1/")); - - // IDs with dataset prefix - assertEquals(Optional.of("id1"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "/id1")); - assertEquals(Optional.of("id2"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset1/id2")); - assertEquals(Optional.of("id3"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "/dataset1/id3")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "/dataset1/id1/")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "/dataset1//id2")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset2/id3")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "/dataset2/id1")); - - // IDs with prefixes - assertEquals(Optional.of("id1"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "a/dataset1/id1")); - assertEquals(Optional.of("id2"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "http://a/dataset1/id2")); - assertEquals(Optional.of("id3"), RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "file://a/dataset1/id3")); - - // IDs with invalid characters - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset1/ id1")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset1 /id2")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset 1/id3")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "test 1/dataset1/id1")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset1/id-2")); - assertThrows(BadContentException.class, 
()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "dataset1/i?d3")); - assertThrows(BadContentException.class, ()-> RecordIdUtils - .checkAndNormalizeRecordId("dataset1", "(dataset1)/id1")); - } -} diff --git a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TransformationParametersTest.java b/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TransformationParametersTest.java deleted file mode 100644 index a3edc1e498..0000000000 --- a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/common/TransformationParametersTest.java +++ /dev/null @@ -1,23 +0,0 @@ -package eu.europeana.metis.core.common; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import eu.europeana.metis.core.dataset.Dataset; -import java.util.Locale; -import org.junit.jupiter.api.Test; - -class TransformationParametersTest { - - @Test - void testTransformationParametersConstruction() { - final Dataset dataset = new Dataset(); - dataset.setDatasetId("exampleDatasetId"); - dataset.setDatasetName("exampleDatasetName"); - dataset.setCountry(Country.GREECE); - dataset.setLanguage(Language.EL); - final TransformationParameters transformationParameters = new TransformationParameters(dataset); - assertEquals(dataset.getDatasetId() + "_" + dataset.getDatasetName(), transformationParameters.getDatasetName()); - assertEquals(dataset.getCountry().getName(), transformationParameters.getEdmCountry()); - assertEquals(dataset.getLanguage().name().toLowerCase(Locale.US), transformationParameters.getEdmLanguage()); - } -} \ No newline at end of file diff --git a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/rest/execution/details/PluginViewTest.java b/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/rest/execution/details/PluginViewTest.java deleted file mode 100644 index e173509fc7..0000000000 --- 
a/metis-core/metis-core-common/src/test/java/eu/europeana/metis/core/rest/execution/details/PluginViewTest.java +++ /dev/null @@ -1,154 +0,0 @@ -package eu.europeana.metis.core.rest.execution.details; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.Topology; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class PluginViewTest { - - 
private PluginView pluginView; - - private static Stream providePluginTestData() { - return Stream.of( - Arguments.of(ExecutablePluginFactory.createPlugin(new HTTPHarvestPluginMetadata()), - PluginType.HTTP_HARVEST, Topology.HTTP_HARVEST), - Arguments.of(ExecutablePluginFactory.createPlugin(new OaipmhHarvestPluginMetadata()), - PluginType.OAIPMH_HARVEST, Topology.OAIPMH_HARVEST), - Arguments.of(ExecutablePluginFactory.createPlugin(new ValidationExternalPluginMetadata()), - PluginType.VALIDATION_EXTERNAL, Topology.VALIDATION), - Arguments.of(ExecutablePluginFactory.createPlugin(new TransformationPluginMetadata()), - PluginType.TRANSFORMATION, Topology.TRANSFORMATION), - Arguments.of(ExecutablePluginFactory.createPlugin(new LinkCheckingPluginMetadata()), - PluginType.LINK_CHECKING, Topology.LINK_CHECKING), - Arguments.of(ExecutablePluginFactory.createPlugin(new ValidationInternalPluginMetadata()), - PluginType.VALIDATION_INTERNAL, Topology.VALIDATION), - Arguments.of(ExecutablePluginFactory.createPlugin(new NormalizationPluginMetadata()), - PluginType.NORMALIZATION, Topology.NORMALIZATION), - Arguments.of(ExecutablePluginFactory.createPlugin(new EnrichmentPluginMetadata()), - PluginType.ENRICHMENT, Topology.ENRICHMENT), - Arguments.of(ExecutablePluginFactory.createPlugin(new MediaProcessPluginMetadata()), - PluginType.MEDIA_PROCESS, Topology.MEDIA_PROCESS), - Arguments.of(ExecutablePluginFactory.createPlugin(new IndexToPreviewPluginMetadata()), - PluginType.PREVIEW, Topology.INDEX), - Arguments.of(ExecutablePluginFactory.createPlugin(new IndexToPublishPluginMetadata()), - PluginType.PUBLISH, Topology.INDEX), - Arguments.of(ExecutablePluginFactory.createPlugin(new DepublishPluginMetadata()), - PluginType.DEPUBLISH, Topology.DEPUBLISH) - ); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getPluginType(AbstractMetisPlugin metisPlugin, PluginType expectedPluginType) { - pluginView = new PluginView(metisPlugin, true); - 
assertEquals(expectedPluginType, pluginView.getPluginType()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getId(AbstractMetisPlugin metisPlugin, PluginType expectedPluginType) { - pluginView = new PluginView(metisPlugin, true); - assertTrue(pluginView.getId().contains(expectedPluginType.name())); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getPluginStatus(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertEquals(PluginStatus.INQUEUE, pluginView.getPluginStatus()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getDataStatus(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertEquals(DataStatus.NOT_YET_GENERATED, pluginView.getDataStatus()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getFailMessage(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNull(pluginView.getFailMessage()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getStartedDate(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNull(pluginView.getStartedDate()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getUpdatedDate(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNull(pluginView.getUpdatedDate()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getFinishedDate(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNull(pluginView.getFinishedDate()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getExternalTaskId(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNull(pluginView.getExternalTaskId()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void 
getExecutionProgress(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNotNull(pluginView.getExecutionProgress()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getTopologyName(AbstractMetisPlugin metisPlugin, PluginType pluginType, Topology topology) { - pluginView = new PluginView(metisPlugin, true); - assertEquals(topology.getTopologyName(), pluginView.getTopologyName()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void isCanDisplayRawXml(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertTrue(pluginView.isCanDisplayRawXml()); - } - - @ParameterizedTest - @MethodSource("providePluginTestData") - void getPluginMetadata(AbstractMetisPlugin metisPlugin) { - pluginView = new PluginView(metisPlugin, true); - assertNotNull(pluginView.getPluginMetadata()); - } -} diff --git a/metis-core/metis-core-rest/.gitignore b/metis-core/metis-core-rest/.gitignore deleted file mode 100644 index 47e041f234..0000000000 --- a/metis-core/metis-core-rest/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -##Add to ignore to not commit by mistake -/src/main/resources/application.properties \ No newline at end of file diff --git a/metis-core/metis-core-rest/Dockerfile b/metis-core/metis-core-rest/Dockerfile deleted file mode 100644 index 8a40812780..0000000000 --- a/metis-core/metis-core-rest/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM eclipse-temurin:21-jre -COPY target/*.jar app.jar -EXPOSE 8080 -ENTRYPOINT ["java", "-jar", "/app.jar"] diff --git a/metis-core/metis-core-rest/docker-compose.yml b/metis-core/metis-core-rest/docker-compose.yml deleted file mode 100644 index bbed40477c..0000000000 --- a/metis-core/metis-core-rest/docker-compose.yml +++ /dev/null @@ -1,48 +0,0 @@ -version: '3.8' - -services: - rabbitmq: - image: rabbitmq:3.9.12-management-alpine - container_name: metis-core-rabbitmq - environment: - - RABBIT_DEFAULT_VHOST=/ - - 
RABBITMQ_DEFAULT_USER=guest - - RABBITMQ_DEFAULT_PASS=guest - ports: - - '5672:5672' - - '15672:15672' - redis: - image: redis:6.0.16-alpine - container_name: metis-core-redis - command: redis-server --requirepass guest - ports: - - '6379:6379' - mongo: - image: mongo:4.2.9 - container_name: metis-core-mongo - environment: - MONGO_INITDB_DATABASE: metis-core - MONGO_INITDB_ROOT_USERNAME: guest - MONGO_INITDB_ROOT_PASSWORD: guest - ports: - - '27017:27017' - metis-core-local: - image: europeana/metis-core:develop - container_name: metis-core-local - build: - context: ./ - dockerfile: Dockerfile - ports: - - '8080:8080' - environment: - RABBITMQ_HOST: metis-core-rabbitmq - MONGO_HOSTS: metis-core-mongo - REDIS_HOST: metis-core-redis - volumes: - - /data/metis-configuration/metis-framework/metis-core/metis-core-rest/k8s/overlays/local/resources/custom-truststore.jks:/data/certificates/custom-truststore.jks - - /data/metis-configuration/metis-framework/metis-core/metis-core-rest/k8s/overlays/local/resources/application.properties:/application.properties - - /data/metis-configuration/metis-framework/metis-core/metis-core-rest/k8s/overlays/local/resources/log4j2.xml:/data/logging/log4j2.xml - depends_on: - - rabbitmq - - mongo - - redis diff --git a/metis-core/metis-core-rest/pom.xml b/metis-core/metis-core-rest/pom.xml deleted file mode 100644 index bf97a7d794..0000000000 --- a/metis-core/metis-core-rest/pom.xml +++ /dev/null @@ -1,207 +0,0 @@ - - - 4.0.0 - - metis-core - eu.europeana.metis - 12.2 - - metis-core-rest - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-logging - - - - - org.springframework.boot - spring-boot-starter-actuator - - - org.springframework.boot - spring-boot-configuration-processor - true - - - org.springframework.boot - spring-boot-starter-log4j2 - - - co.elastic.apm - apm-agent-attach - ${version.elastic.apm} - runtime - - - eu.europeana.cloud - ecloud-service-mcs-rest-client-java - 
${version.ecloud} - - - org.springframework.security - spring-security-acl - - - org.springframework.security - spring-security-config - - - org.springframework.security - spring-security-web - - - - - eu.europeana.cloud - ecloud-service-dps-rest-client-java - ${version.ecloud} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - org.springframework - spring-webmvc - - - org.springframework - spring-context - - - commons-logging - commons-logging - - - - - org.springframework - spring-web - - - eu.europeana.metis - metis-common-spring-properties - ${project.version} - - - eu.europeana.metis - metis-core-service - ${project.version} - - - - - com.fasterxml.jackson.core - jackson-databind - ${version.jackson} - - - com.fasterxml.jackson.core - jackson-annotations - ${version.jackson} - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - ${version.jackson} - - - - org.hamcrest - hamcrest-all - test - - - org.mockito - mockito-core - - - - org.junit.jupiter - junit-jupiter-api - - - org.junit.jupiter - junit-jupiter-engine - - - org.springframework - spring-test - - - eu.europeana.metis - metis-authentication-rest-client - ${project.version} - - - com.jayway.jsonpath - json-path - test - - - com.jayway.jsonpath - json-path-assert - test - - - - - - org.springframework.boot - spring-boot-maven-plugin - ${version.spring.boot} - - - - repackage - - - - - - - - - - - org.springframework.boot - spring-boot-dependencies - ${version.spring.boot} - pom - import - - - - diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ApplicationConfiguration.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ApplicationConfiguration.java deleted file mode 100644 index dfdd86484c..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ApplicationConfiguration.java +++ /dev/null @@ -1,241 +0,0 @@ -package eu.europeana.metis.core.rest.config; - -import 
com.mongodb.client.MongoClient; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.core.rest.config.properties.MetisCoreConfigurationProperties; -import eu.europeana.metis.core.service.Authorizer; -import eu.europeana.metis.core.service.DatasetService; -import eu.europeana.metis.core.service.DepublishRecordIdService; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.mongo.connection.MongoClientProvider; -import eu.europeana.metis.mongo.connection.MongoProperties; -import eu.europeana.metis.mongo.connection.MongoProperties.ReadPreferenceValue; -import eu.europeana.metis.mongo.utils.CustomObjectMapper; -import eu.europeana.metis.utils.CustomTruststoreAppender; -import eu.europeana.metis.utils.CustomTruststoreAppender.TrustStoreConfigurationException; -import eu.europeana.metis.utils.apm.ElasticAPMConfiguration; -import jakarta.annotation.PreDestroy; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import metis.common.config.properties.TruststoreConfigurationProperties; -import metis.common.config.properties.ecloud.EcloudConfigurationProperties; -import metis.common.config.properties.mongo.MongoConfigurationProperties; -import org.apache.commons.lang3.StringUtils; -import org.redisson.api.RedissonClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.Resource; -import org.springframework.web.multipart.support.StandardServletMultipartResolver; -import org.springframework.web.servlet.DispatcherServlet; -import org.springframework.web.servlet.View; -import org.springframework.web.servlet.ViewResolver; -import org.springframework.web.servlet.view.BeanNameViewResolver; -import org.springframework.web.servlet.view.json.MappingJackson2JsonView; - -/** - * Entry class with configuration fields and beans initialization for the application. - */ -@Configuration -@EnableConfigurationProperties({ - ElasticAPMConfiguration.class, TruststoreConfigurationProperties.class, MongoConfigurationProperties.class, - MetisCoreConfigurationProperties.class, EcloudConfigurationProperties.class}) -@ComponentScan(basePackages = {"eu.europeana.metis.core.rest.controller"}) -public class ApplicationConfiguration { - - private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final MongoClient mongoClient; - - @Value(value = "classpath:default_transformation.xslt") - private Resource defaultTransformation; - - /** - * Autowired constructor for Spring Configuration class. 
- * - * @throws TrustStoreConfigurationException if the configuration of the truststore failed - */ - @Autowired - public ApplicationConfiguration(TruststoreConfigurationProperties truststoreConfigurationProperties, - MongoConfigurationProperties mongoConfigurationProperties) - throws TrustStoreConfigurationException { - ApplicationConfiguration.initializeTruststore(truststoreConfigurationProperties); - this.mongoClient = ApplicationConfiguration.getMongoClient(mongoConfigurationProperties); - } - - /** - * This method performs the initializing tasks for the application. - * - * @param truststoreConfigurationProperties The properties. - * @throws CustomTruststoreAppender.TrustStoreConfigurationException In case a problem occurred with the truststore. - */ - static void initializeTruststore(TruststoreConfigurationProperties truststoreConfigurationProperties) - throws CustomTruststoreAppender.TrustStoreConfigurationException { - if (StringUtils.isNotEmpty(truststoreConfigurationProperties.getPath()) && StringUtils - .isNotEmpty(truststoreConfigurationProperties.getPassword())) { - CustomTruststoreAppender - .appendCustomTruststoreToDefault(truststoreConfigurationProperties.getPath(), - truststoreConfigurationProperties.getPassword()); - LOGGER.info("Custom truststore appended to default truststore"); - } - } - - public static MongoClient getMongoClient(MongoConfigurationProperties mongoConfigurationProperties) { - final MongoProperties mongoProperties = new MongoProperties<>( - IllegalArgumentException::new); - mongoProperties.setAllProperties( - mongoConfigurationProperties.getHosts(), - mongoConfigurationProperties.getPorts(), - mongoConfigurationProperties.getAuthenticationDatabase(), - mongoConfigurationProperties.getUsername(), - mongoConfigurationProperties.getPassword(), - mongoConfigurationProperties.isEnableSsl(), - ReadPreferenceValue.PRIMARY_PREFERRED, - mongoConfigurationProperties.getApplicationName()); - - return new 
MongoClientProvider<>(mongoProperties).createMongoClient(); - } - - @Bean(name = DispatcherServlet.MULTIPART_RESOLVER_BEAN_NAME) - public StandardServletMultipartResolver getMultipartResolver() { - return new StandardServletMultipartResolver(); - } - - @Bean - AuthenticationClient getAuthenticationClient(MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - return new AuthenticationClient(metisCoreConfigurationProperties.getAuthenticationBaseUrl()); - } - - @Bean - MorphiaDatastoreProvider getMorphiaDatastoreProvider(MongoConfigurationProperties mongoConfigurationProperties) - throws IOException { - return new MorphiaDatastoreProviderImpl(mongoClient, mongoConfigurationProperties.getDatabase(), - defaultTransformation::getInputStream); - } - - @Bean - Authorizer geAuthorizer(DatasetDao datasetDao) { - return new Authorizer(datasetDao); - } - - /** - * Get the DAO for datasets. - * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} - * @param ecloudDataSetServiceClient the ecloud dataset client - * @return {@link DatasetDao} used to access the database for datasets - */ - @Bean - public DatasetDao getDatasetDao( - MorphiaDatastoreProvider morphiaDatastoreProvider, DataSetServiceClient ecloudDataSetServiceClient, - EcloudConfigurationProperties ecloudConfigurationProperties) { - DatasetDao datasetDao = new DatasetDao(morphiaDatastoreProvider, ecloudDataSetServiceClient); - datasetDao.setDatasetsPerRequest(RequestLimits.DATASETS_PER_REQUEST.getLimit()); - datasetDao.setEcloudProvider(ecloudConfigurationProperties.getProvider()); - return datasetDao; - } - - /** - * Get the DAO for xslts. 
- * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} - * @return {@link DatasetXsltDao} used to access the database for datasets - */ - @Bean - public DatasetXsltDao getXsltDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - return new DatasetXsltDao(morphiaDatastoreProvider); - } - - /** - * Get the DAO for depublished records. - * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} - * @return DAO used to access the database for depublished records. - */ - @Bean - public DepublishRecordIdDao getDepublishedRecordDao( - MorphiaDatastoreProvider morphiaDatastoreProvider, - MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - return new DepublishRecordIdDao(morphiaDatastoreProvider, - metisCoreConfigurationProperties.getMaxDepublishRecordIdsPerDataset()); - } - - /** - * Get the Service for datasets. - *

It encapsulates several DAOs and combines their functionality into methods

- * - * @param datasetDao the Dao instance to access the Dataset database - * @param datasetXsltDao the Dao instance to access the DatasetXslt database - * @param workflowDao the Dao instance to access the Workflow database - * @param workflowExecutionDao the Dao instance to access the WorkflowExecution database - * @param scheduledWorkflowDao the Dao instance to access the ScheduledWorkflow database - * @param redissonClient {@link RedissonClient} - * @param authorizer the authorizer for this service - * @return the dataset service instance instantiated - */ - @Bean - public DatasetService getDatasetService( - DatasetDao datasetDao, DatasetXsltDao datasetXsltDao, - WorkflowDao workflowDao, WorkflowExecutionDao workflowExecutionDao, - ScheduledWorkflowDao scheduledWorkflowDao, RedissonClient redissonClient, - Authorizer authorizer, MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - DatasetService datasetService = new DatasetService(datasetDao, datasetXsltDao, workflowDao, - workflowExecutionDao, scheduledWorkflowDao, redissonClient, authorizer); - datasetService.setMetisCoreUrl(metisCoreConfigurationProperties.getBaseUrl()); - return datasetService; - } - - @Bean - public DepublishRecordIdService getDepublishedRecordService( - DepublishRecordIdDao depublishRecordIdDao, OrchestratorService orchestratorService, - Authorizer authorizer) { - return new DepublishRecordIdService(authorizer, orchestratorService, depublishRecordIdDao); - } - - /** - * Closes connections to databases when the application closes. - */ - @PreDestroy - public void close() { - if (mongoClient != null) { - mongoClient.close(); - } - } - - /** - * Required for json serialization for REST. - * - * @return {@link View} - */ - @Bean - public View json() { - MappingJackson2JsonView view = new MappingJackson2JsonView(); - view.setPrettyPrint(true); - view.setObjectMapper(new CustomObjectMapper()); - return view; - } - - /** - * Required for json serialization for REST. 
- * - * @return {@link ViewResolver} - */ - @Bean - public ViewResolver viewResolver() { - return new BeanNameViewResolver(); - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ECloudConfig.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ECloudConfig.java deleted file mode 100644 index 87c4f5ec12..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/ECloudConfig.java +++ /dev/null @@ -1,122 +0,0 @@ -package eu.europeana.metis.core.rest.config; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.client.uis.rest.UISClient; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.mcs.driver.FileServiceClient; -import eu.europeana.cloud.mcs.driver.RecordServiceClient; -import eu.europeana.metis.core.rest.config.properties.MetisCoreConfigurationProperties; -import jakarta.annotation.PreDestroy; -import metis.common.config.properties.ecloud.EcloudConfigurationProperties; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -/** - * ECloud configuration class. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-11-22 - */ -@Configuration -@EnableConfigurationProperties({ - MetisCoreConfigurationProperties.class, EcloudConfigurationProperties.class}) -@ComponentScan(basePackages = {"eu.europeana.metis.core.rest.controller"}) -public class ECloudConfig implements WebMvcConfigurer { - - private DataSetServiceClient dataSetServiceClient; - private RecordServiceClient recordServiceClient; - private FileServiceClient fileServiceClient; - private DpsClient dpsClient; - private UISClient uisClient; - - - @Bean - DataSetServiceClient dataSetServiceClient( - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - dataSetServiceClient = new DataSetServiceClient( - ecloudConfigurationProperties.getBaseUrl(), - ecloudConfigurationProperties.getUsername(), - ecloudConfigurationProperties.getPassword(), - metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds(), - metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()); - return dataSetServiceClient; - } - - @Bean - RecordServiceClient recordServiceClient( - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - recordServiceClient = new RecordServiceClient( - ecloudConfigurationProperties.getBaseUrl(), - ecloudConfigurationProperties.getUsername(), - ecloudConfigurationProperties.getPassword(), - metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds(), - metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()); - return recordServiceClient; - } - - @Bean - FileServiceClient fileServiceClient( - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - fileServiceClient = new FileServiceClient( - ecloudConfigurationProperties.getBaseUrl(), - 
ecloudConfigurationProperties.getUsername(), - ecloudConfigurationProperties.getPassword(), - metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds(), - metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()); - return fileServiceClient; - } - - @Bean - DpsClient dpsClient( - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - dpsClient = new DpsClient( - ecloudConfigurationProperties.getDpsBaseUrl(), - ecloudConfigurationProperties.getUsername(), - ecloudConfigurationProperties.getPassword(), - metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds(), - metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()); - return dpsClient; - } - - @Bean - UISClient uisClient( - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - uisClient = new UISClient( - ecloudConfigurationProperties.getBaseUrl(), - ecloudConfigurationProperties.getUsername(), - ecloudConfigurationProperties.getPassword(), - metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds(), - metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()); - return uisClient; - } - - /** - * Close all open clients. 
- */ - @PreDestroy - public void close() { - if (dataSetServiceClient != null) { - dataSetServiceClient.close(); - } - if (recordServiceClient != null) { - recordServiceClient.close(); - } - if (fileServiceClient != null) { - fileServiceClient.close(); - } - if (dpsClient != null) { - dpsClient.close(); - } - if (uisClient != null) { - uisClient.close(); - } - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/OrchestratorConfig.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/OrchestratorConfig.java deleted file mode 100644 index aa289cc676..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/OrchestratorConfig.java +++ /dev/null @@ -1,354 +0,0 @@ -package eu.europeana.metis.core.rest.config; - -import com.rabbitmq.client.Channel; -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.client.uis.rest.UISClient; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.mcs.driver.FileServiceClient; -import eu.europeana.cloud.mcs.driver.RecordServiceClient; -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dao.WorkflowValidationUtils; -import eu.europeana.metis.core.execution.SchedulerExecutor; -import eu.europeana.metis.core.execution.SemaphoresPerPluginManager; -import eu.europeana.metis.core.execution.WorkflowExecutionMonitor; -import eu.europeana.metis.core.execution.WorkflowExecutorManager; -import eu.europeana.metis.core.execution.WorkflowPostProcessor; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import 
eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.core.rest.config.properties.MetisCoreConfigurationProperties; -import eu.europeana.metis.core.service.Authorizer; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.service.ProxiesService; -import eu.europeana.metis.core.service.RedirectionInferrer; -import eu.europeana.metis.core.service.ScheduleWorkflowService; -import eu.europeana.metis.core.service.WorkflowExecutionFactory; -import eu.europeana.metis.core.workflow.ValidationProperties; -import eu.europeana.metis.core.workflow.plugins.ThrottlingValues; -import jakarta.annotation.PreDestroy; -import java.net.MalformedURLException; -import java.nio.file.Paths; -import java.time.Duration; -import java.util.concurrent.TimeUnit; -import metis.common.config.properties.TruststoreConfigurationProperties; -import metis.common.config.properties.ecloud.EcloudConfigurationProperties; -import metis.common.config.properties.rabbitmq.RabbitmqConfigurationProperties; -import metis.common.config.properties.redis.RedisConfigurationProperties; -import metis.common.config.properties.redis.RedissonConfigurationProperties; -import metis.common.config.properties.validation.ValidationConfigurationProperties; -import org.apache.commons.lang3.StringUtils; -import org.redisson.Redisson; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.redisson.config.SingleServerConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.scheduling.annotation.Scheduled; -import 
org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -/** - * Orchestrator configuration class. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-11-22 - */ -@Configuration -@EnableConfigurationProperties({ - TruststoreConfigurationProperties.class, ValidationConfigurationProperties.class, - RedisConfigurationProperties.class, MetisCoreConfigurationProperties.class, - EcloudConfigurationProperties.class}) -@ComponentScan(basePackages = {"eu.europeana.metis.core.rest.controller"}) -@EnableScheduling -public class OrchestratorConfig implements WebMvcConfigurer { - - private static final Logger LOGGER = LoggerFactory.getLogger(OrchestratorConfig.class); - private SchedulerExecutor schedulerExecutor; - private WorkflowExecutionMonitor workflowExecutionMonitor; - private RedissonClient redissonClient; - - @Bean - RedissonClient getRedissonClient( - TruststoreConfigurationProperties truststoreConfigurationProperties, - RedisConfigurationProperties redisConfigurationProperties) - throws MalformedURLException { - Config config = new Config(); - - SingleServerConfig singleServerConfig; - if (redisConfigurationProperties.isEnableSsl()) { - singleServerConfig = config.useSingleServer().setAddress(String - .format("rediss://%s:%s", redisConfigurationProperties.getHost(), - redisConfigurationProperties.getPort())); - LOGGER.info("Redis enabled SSL"); - if (redisConfigurationProperties.isEnableCustomTruststore()) { - singleServerConfig - .setSslTruststore(Paths.get(truststoreConfigurationProperties.getPath()).toUri().toURL()); - singleServerConfig.setSslTruststorePassword(truststoreConfigurationProperties.getPassword()); - LOGGER.info("Redis enabled SSL using custom Truststore"); - } - } else { - singleServerConfig = config.useSingleServer().setAddress(String - .format("redis://%s:%s", redisConfigurationProperties.getHost(), - redisConfigurationProperties.getPort())); - LOGGER.info("Redis disabled SSL"); - } - if 
(StringUtils.isNotEmpty(redisConfigurationProperties.getUsername())) { - singleServerConfig.setUsername(redisConfigurationProperties.getUsername()); - } - if (StringUtils.isNotEmpty(redisConfigurationProperties.getPassword())) { - singleServerConfig.setPassword(redisConfigurationProperties.getPassword()); - } - - RedissonConfigurationProperties redisson = redisConfigurationProperties.getRedisson(); - singleServerConfig.setConnectionPoolSize(redisson.getConnectionPoolSize()) - .setConnectionMinimumIdleSize(redisson.getConnectionPoolSize()) - .setConnectTimeout((int) TimeUnit.SECONDS.toMillis(redisson.getConnectTimeoutInSeconds())) - .setDnsMonitoringInterval((int) TimeUnit.SECONDS.toMillis(redisson.getDnsMonitorIntervalInSeconds())) - .setIdleConnectionTimeout((int) TimeUnit.SECONDS.toMillis(redisson.getIdleConnectionTimeoutInSeconds())) - .setRetryAttempts(redisson.getRetryAttempts()); - //Give some secs to unlock if connection lost, or if too long to unlock - config.setLockWatchdogTimeout(TimeUnit.SECONDS.toMillis(redisson.getLockWatchdogTimeoutInSeconds())); - redissonClient = Redisson.create(config); - return redissonClient; - } - - @Bean - public OrchestratorService getOrchestratorService(WorkflowDao workflowDao, - WorkflowExecutionDao workflowExecutionDao, WorkflowValidationUtils workflowValidationUtils, - DataEvolutionUtils dataEvolutionUtils, DatasetDao datasetDao, - WorkflowExecutionFactory workflowExecutionFactory, - WorkflowExecutorManager workflowExecutorManager, Authorizer authorizer, - DepublishRecordIdDao depublishRecordIdDao, - MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - OrchestratorService orchestratorService = new OrchestratorService(workflowExecutionFactory, - workflowDao, workflowExecutionDao, workflowValidationUtils, dataEvolutionUtils, datasetDao, - workflowExecutorManager, redissonClient, authorizer, depublishRecordIdDao); - 
orchestratorService.setSolrCommitPeriodInMins(metisCoreConfigurationProperties.getSolrCommitPeriodInMinutes()); - return orchestratorService; - } - - @Bean(name = "validationExternalProperties") - public ValidationProperties getValidationExternalProperties( - ValidationConfigurationProperties validationConfigurationProperties) { - return new ValidationProperties( - validationConfigurationProperties.getValidationExternalSchemaZip(), - validationConfigurationProperties.getValidationExternalSchemaRoot(), - validationConfigurationProperties.getValidationExternalSchematronRoot()); - } - - @Bean(name = "validationInternalProperties") - public ValidationProperties getValidationInternalProperties( - ValidationConfigurationProperties validationConfigurationProperties) { - return new ValidationProperties( - validationConfigurationProperties.getValidationInternalSchemaZip(), - validationConfigurationProperties.getValidationInternalSchemaRoot(), - validationConfigurationProperties.getValidationInternalSchematronRoot()); - } - - @Bean - public WorkflowExecutionFactory getWorkflowExecutionFactory( - @Qualifier("validationExternalProperties") ValidationProperties validationExternalProperties, - @Qualifier("validationInternalProperties") ValidationProperties validationInternalProperties, - RedirectionInferrer redirectionInferrer, - DatasetXsltDao datasetXsltDao, DepublishRecordIdDao depublishRecordIdDao, - MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - WorkflowExecutionFactory workflowExecutionFactory = new WorkflowExecutionFactory(datasetXsltDao, - depublishRecordIdDao, redirectionInferrer); - workflowExecutionFactory - .setValidationExternalProperties(validationExternalProperties); - workflowExecutionFactory - .setValidationInternalProperties(validationInternalProperties); - workflowExecutionFactory.setDefaultSamplingSizeForLinkChecking( - metisCoreConfigurationProperties.getLinkCheckingDefaultSamplingSize()); - return workflowExecutionFactory; - } - - 
@Bean - public RedirectionInferrer getRedirectionInferrer(WorkflowExecutionDao workflowExecutionDao, - DataEvolutionUtils dataEvolutionUtils) { - return new RedirectionInferrer(workflowExecutionDao, dataEvolutionUtils); - } - - @Bean - public ScheduleWorkflowService getScheduleWorkflowService( - ScheduledWorkflowDao scheduledWorkflowDao, WorkflowDao workflowDao, DatasetDao datasetDao, - Authorizer authorizer) { - return new ScheduleWorkflowService(scheduledWorkflowDao, workflowDao, datasetDao, authorizer); - } - - @Bean - public ProxiesService getProxiesService( - WorkflowExecutionDao workflowExecutionDao, DataSetServiceClient ecloudDataSetServiceClient, - RecordServiceClient recordServiceClient, FileServiceClient fileServiceClient, - DpsClient dpsClient, UISClient uisClient, Authorizer authorizer, - EcloudConfigurationProperties ecloudConfigurationProperties) { - return new ProxiesService(workflowExecutionDao, ecloudDataSetServiceClient, recordServiceClient, - fileServiceClient, dpsClient, uisClient, ecloudConfigurationProperties.getProvider(), authorizer); - } - - /** - * Bean workflow execution post processor. - * - * @param depublishRecordIdDao the depublish record id dao - * @param datasetDao the dataset dao - * @param workflowExecutionDao the workflow execution dao - * @param dpsClient the dps client - * @return the workflow post processor - */ - @Bean - public WorkflowPostProcessor workflowPostProcessor(DepublishRecordIdDao depublishRecordIdDao, - DatasetDao datasetDao, WorkflowExecutionDao workflowExecutionDao, DpsClient dpsClient) { - return new WorkflowPostProcessor(depublishRecordIdDao, datasetDao, workflowExecutionDao, - dpsClient); - } - - /** - * Bean semaphore plugin manager. 
- * - * @return the semaphore plugin manager - */ - @Bean - public SemaphoresPerPluginManager semaphoresPerPluginManager( - MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - return new SemaphoresPerPluginManager(metisCoreConfigurationProperties.getMaxConcurrentThreads()); - } - - @Bean - public WorkflowExecutorManager getWorkflowExecutorManager( - SemaphoresPerPluginManager semaphoresPerPluginManager, - WorkflowExecutionDao workflowExecutionDao, WorkflowPostProcessor workflowPostProcessor, - @Qualifier("rabbitmqPublisherChannel") Channel rabbitmqPublisherChannel, - @Qualifier("rabbitmqConsumerChannel") Channel rabbitmqConsumerChannel, - RedissonClient redissonClient, DpsClient dpsClient, - RabbitmqConfigurationProperties rabbitmqConfigurationProperties, - MetisCoreConfigurationProperties metisCoreConfigurationProperties, - EcloudConfigurationProperties ecloudConfigurationProperties) { - WorkflowExecutorManager workflowExecutorManager = new WorkflowExecutorManager( - semaphoresPerPluginManager, workflowExecutionDao, workflowPostProcessor, - rabbitmqPublisherChannel, rabbitmqConsumerChannel, redissonClient, dpsClient); - workflowExecutorManager.setRabbitmqQueueName(rabbitmqConfigurationProperties.getQueueName()); - workflowExecutorManager - .setDpsMonitorCheckIntervalInSecs(metisCoreConfigurationProperties.getDpsMonitorCheckIntervalInSeconds()); - workflowExecutorManager.setPeriodOfNoProcessedRecordsChangeInMinutes( - metisCoreConfigurationProperties.getPeriodOfNoProcessedRecordsChangeInMinutes()); - workflowExecutorManager.setEcloudBaseUrl(ecloudConfigurationProperties.getBaseUrl()); - workflowExecutorManager.setEcloudProvider(ecloudConfigurationProperties.getProvider()); - workflowExecutorManager.setMetisCoreBaseUrl(metisCoreConfigurationProperties.getBaseUrl()); - workflowExecutorManager.setThrottlingValues(getThrottlingValues(metisCoreConfigurationProperties)); - return workflowExecutorManager; - } - - @Bean - public WorkflowExecutionDao 
getWorkflowExecutionDao( - MorphiaDatastoreProvider morphiaDatastoreProvider, - MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - WorkflowExecutionDao workflowExecutionDao = new WorkflowExecutionDao(morphiaDatastoreProvider); - workflowExecutionDao - .setWorkflowExecutionsPerRequest(RequestLimits.WORKFLOW_EXECUTIONS_PER_REQUEST.getLimit()); - workflowExecutionDao - .setMaxServedExecutionListLength(metisCoreConfigurationProperties.getMaxServedExecutionListLength()); - return workflowExecutionDao; - } - - @Bean - DataEvolutionUtils getDataEvolutionUtils(WorkflowExecutionDao workflowExecutionDao) { - return new DataEvolutionUtils(workflowExecutionDao); - } - - @Bean - WorkflowValidationUtils getWorkflowValidationUtils(DataEvolutionUtils dataEvolutionUtils, - DepublishRecordIdDao depublishRecordIdDao) { - return new WorkflowValidationUtils(depublishRecordIdDao, dataEvolutionUtils); - } - - @Bean - public ScheduledWorkflowDao getScheduledWorkflowDao( - MorphiaDatastoreProvider morphiaDatastoreProvider) { - return new ScheduledWorkflowDao(morphiaDatastoreProvider); - } - - @Bean - public WorkflowDao getWorkflowDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - return new WorkflowDao(morphiaDatastoreProvider); - } - - @Bean - public WorkflowExecutionMonitor getWorkflowExecutionMonitor( - WorkflowExecutorManager workflowExecutorManager, WorkflowExecutionDao workflowExecutionDao, - RedissonClient redissonClient, MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - - // Computes the leniency for the failsafe action: how long ago (worst case) can the last update - // time have been set before we assume the execution hangs. 
- final Duration failsafeLeniency = Duration.ZERO - .plusMillis(metisCoreConfigurationProperties.getDpsConnectTimeoutInMilliseconds()) - .plusMillis(metisCoreConfigurationProperties.getDpsReadTimeoutInMilliseconds()) - .plusSeconds(metisCoreConfigurationProperties.getDpsMonitorCheckIntervalInSeconds()) - .plusSeconds(metisCoreConfigurationProperties.getFailsafeMarginOfInactivityInSeconds()); - - // Create and return the workflow execution monitor. - workflowExecutionMonitor = new WorkflowExecutionMonitor(workflowExecutorManager, - workflowExecutionDao, redissonClient, failsafeLeniency); - return workflowExecutionMonitor; - } - - @Bean - public SchedulerExecutor getSchedulingExecutor(OrchestratorService orchestratorService, - ScheduleWorkflowService scheduleWorkflowService, RedissonClient redissonClient) { - schedulerExecutor = new SchedulerExecutor(orchestratorService, scheduleWorkflowService, - redissonClient); - return schedulerExecutor; - } - - @Bean - public ThrottlingValues getThrottlingValues(MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - return new ThrottlingValues(metisCoreConfigurationProperties.getThreadLimitThrottlingLevelWeak(), - metisCoreConfigurationProperties.getThreadLimitThrottlingLevelMedium(), - metisCoreConfigurationProperties.getThreadLimitThrottlingLevelStrong()); - } - - /** - * Failsafe periodic thread. - *

It will find stale executions and will re-submit them in the distributed queue.

- */ - // TODO: 24/08/2023 Is there a better way to load the configuration here? - @Scheduled(fixedDelayString = "${metis-core.periodicFailsafeCheckInMilliseconds}") - public void runFailsafeExecutor() { - this.workflowExecutionMonitor.performFailsafe(); - LOGGER.info("Failsafe task finished."); - } - - /** - * Scheduling periodic thread. - *

Checks if scheduled workflows are valid for starting and sends them to the distributed - * queue.

- */ - // TODO: 24/08/2023 Is there a better way to load the configuration here? - @Scheduled( - fixedDelayString = "${metis-core.periodicSchedulerCheckInMilliseconds}", - initialDelayString = "${metis-core.periodicSchedulerCheckInMilliseconds}") - public void runSchedulingExecutor() { - this.schedulerExecutor.performScheduling(); - LOGGER.info("Scheduler task finished."); - } - - /** - * Close resources - */ - @PreDestroy - public void close() { - // Shut down Redisson - if (redissonClient != null && !redissonClient.isShuttingDown()) { - redissonClient.shutdown(); - } - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java deleted file mode 100644 index 7c1af34134..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/QueueConfig.java +++ /dev/null @@ -1,171 +0,0 @@ -package eu.europeana.metis.core.rest.config; - -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.Connection; -import com.rabbitmq.client.ConnectionFactory; -import com.rabbitmq.client.impl.ForgivingExceptionHandler; -import eu.europeana.metis.core.execution.QueueConsumer; -import eu.europeana.metis.core.execution.WorkflowExecutionMonitor; -import eu.europeana.metis.core.execution.WorkflowExecutorManager; -import eu.europeana.metis.exception.GenericMetisException; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateException; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeoutException; -import jakarta.annotation.PreDestroy; -import 
javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; -import metis.common.config.properties.TruststoreConfigurationProperties; -import metis.common.config.properties.rabbitmq.RabbitmqConfigurationProperties; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -@Configuration -@EnableConfigurationProperties({RabbitmqConfigurationProperties.class, TruststoreConfigurationProperties.class}) -@ComponentScan(basePackages = {"eu.europeana.metis.core.rest.controller"}) -@EnableScheduling -public class QueueConfig implements WebMvcConfigurer { - - private static final Logger LOGGER = LoggerFactory.getLogger(QueueConfig.class); - private QueueConsumer queueConsumer; - - private Connection connection; - private Channel publisherChannel; - private Channel consumerChannel; - - @Bean - Connection getConnection(RabbitmqConfigurationProperties rabbitmqConfigurationProperties, - TruststoreConfigurationProperties truststoreConfigurationProperties) - throws KeyManagementException, NoSuchAlgorithmException, IOException, TimeoutException, KeyStoreException, CertificateException { - ConnectionFactory connectionFactory = new ConnectionFactory(); - connectionFactory.setHost(rabbitmqConfigurationProperties.getHost()); - connectionFactory.setPort(rabbitmqConfigurationProperties.getPort()); - connectionFactory.setVirtualHost( - StringUtils.isNotBlank(rabbitmqConfigurationProperties.getVirtualHost()) ? 
rabbitmqConfigurationProperties - .getVirtualHost() : "/"); - connectionFactory.setUsername(rabbitmqConfigurationProperties.getUsername()); - connectionFactory.setPassword(rabbitmqConfigurationProperties.getPassword()); - connectionFactory.setAutomaticRecoveryEnabled(true); - if (rabbitmqConfigurationProperties.isEnableSsl()) { - if (rabbitmqConfigurationProperties.isEnableCustomTruststore()) { - // Load the ssl context with the provided truststore - final KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); - // This file is determined in the config files, it does not pose a risk. - @SuppressWarnings("findsecbugs:PATH_TRAVERSAL_IN") - final Path trustStoreFile = Paths.get( - truststoreConfigurationProperties.getPath()); - try (final InputStream inputStream = Files.newInputStream(trustStoreFile)) { - keyStore.load(inputStream, truststoreConfigurationProperties.getPassword().toCharArray()); - } - TrustManagerFactory trustManagerFactory = TrustManagerFactory - .getInstance(TrustManagerFactory.getDefaultAlgorithm()); - trustManagerFactory.init(keyStore); - SSLContext sslContext = SSLContext.getInstance("TLS"); - sslContext.init(null, trustManagerFactory.getTrustManagers(), null); - connectionFactory.useSslProtocol(sslContext); - LOGGER.info("RabbitMQ enabled SSL WITH certificate verification using custom Truststore"); - } else { - connectionFactory.useSslProtocol(); - LOGGER.info("RabbitMQ enabled SSL WITHOUT certificate verification"); - } - } - //Does not close the channel if an unhandled exception occurred - //Can happen in QueueConsumer and it's safe to not handle the execution, it will be picked up - //again from the failsafe Executor. 
- connectionFactory.setExceptionHandler(new ForgivingExceptionHandler()); - connection = connectionFactory.newConnection(); - return connection; - } - - @Bean(name = "rabbitmqPublisherChannel") - Channel getRabbitmqPublisherChannel(Connection connection, RabbitmqConfigurationProperties rabbitmqConfigurationProperties) - throws IOException { - publisherChannel = connection.createChannel(); - setupChannelProperties(publisherChannel, rabbitmqConfigurationProperties); - return publisherChannel; - } - - @Bean(name = "rabbitmqConsumerChannel") - Channel getRabbitmqConsumerChannel(Connection connection, RabbitmqConfigurationProperties rabbitmqConfigurationProperties) - throws IOException { - consumerChannel = connection.createChannel(); - setupChannelProperties(consumerChannel, rabbitmqConfigurationProperties); - return consumerChannel; - } - - private void setupChannelProperties(Channel channel, RabbitmqConfigurationProperties rabbitmqConfigurationProperties) - throws IOException { - Map args = new ConcurrentHashMap<>(); - args.put("x-max-priority", - rabbitmqConfigurationProperties.getHighestPriority());//Higher number means higher priority - //Second boolean durable to false - channel.queueDeclare(rabbitmqConfigurationProperties.getQueueName(), false, false, false, args); - } - - @Bean - public QueueConsumer getQueueConsumer( - RabbitmqConfigurationProperties rabbitmqConfigurationProperties, - WorkflowExecutorManager workflowExecutionManager, - WorkflowExecutionMonitor workflowExecutionMonitor, - @Qualifier("rabbitmqConsumerChannel") Channel rabbitmqConsumerChannel) throws IOException { - queueConsumer = new QueueConsumer(rabbitmqConsumerChannel, - rabbitmqConfigurationProperties.getQueueName(), workflowExecutionManager, workflowExecutionManager, - workflowExecutionMonitor); - return queueConsumer; - } - - // TODO: 24/08/2023 Is there a better way to load the configuration here? 
- @Scheduled( - fixedDelayString = "${metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds}", - initialDelayString = "${metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds}") - public void runQueueConsumerCleanup() throws InterruptedException { - this.queueConsumer.checkAndCleanCompletionService(); - LOGGER.debug("Queue consumer cleanup finished."); - } - - /** - * Close resources. - * - * @throws GenericMetisException if a resource failed to close - */ - @PreDestroy - public void close() throws GenericMetisException { - try { - // Shut down RabbitMQ - if (publisherChannel != null && publisherChannel.isOpen()) { - publisherChannel.close(); - } - if (consumerChannel != null && consumerChannel.isOpen()) { - consumerChannel.close(); - } - if (connection != null && connection.isOpen()) { - connection.close(); - } - // Shutdown the queue consumer - if (queueConsumer != null) { - queueConsumer.close(); - } - } catch (IOException | TimeoutException e) { - throw new GenericMetisException("Could not shutdown resources properly.", e); - } - } - -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/WebMvcConfig.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/WebMvcConfig.java deleted file mode 100644 index 53ae232495..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/WebMvcConfig.java +++ /dev/null @@ -1,43 +0,0 @@ -package eu.europeana.metis.core.rest.config; - -import eu.europeana.metis.core.rest.config.properties.MetisCoreConfigurationProperties; -import java.nio.charset.StandardCharsets; -import java.util.List; -import org.springframework.context.annotation.Configuration; -import org.springframework.http.converter.HttpMessageConverter; -import org.springframework.http.converter.StringHttpMessageConverter; -import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import 
org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter; -import org.springframework.web.servlet.config.annotation.CorsRegistry; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -/** - * Web MVC configuration - */ -@Configuration -public class WebMvcConfig implements WebMvcConfigurer { - - private final MetisCoreConfigurationProperties metisCoreConfigurationProperties; - - /** - * Constructor. - * - * @param metisCoreConfigurationProperties The properties. - */ - public WebMvcConfig(MetisCoreConfigurationProperties metisCoreConfigurationProperties) { - this.metisCoreConfigurationProperties = metisCoreConfigurationProperties; - } - - @Override - public void addCorsMappings(CorsRegistry registry) { - registry.addMapping("/**").allowedMethods("GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS") - .allowedOrigins(metisCoreConfigurationProperties.getAllowedCorsHosts()); - } - - @Override - public void configureMessageConverters(List> converters) { - converters.add(new MappingJackson2HttpMessageConverter()); - converters.add(new MappingJackson2XmlHttpMessageConverter()); - converters.add(new StringHttpMessageConverter(StandardCharsets.UTF_8)); - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java deleted file mode 100644 index c264ff69fc..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/config/properties/MetisCoreConfigurationProperties.java +++ /dev/null @@ -1,187 +0,0 @@ -package eu.europeana.metis.core.rest.config.properties; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -/** - * Class using {@link ConfigurationProperties} loading. 
- */ -@ConfigurationProperties(prefix = "metis-core") -public class MetisCoreConfigurationProperties { - - private int maxConcurrentThreads; - private int dpsMonitorCheckIntervalInSeconds; - private int dpsConnectTimeoutInMilliseconds; - private int dpsReadTimeoutInMilliseconds; - private int failsafeMarginOfInactivityInSeconds; - private int periodicFailsafeCheckInMilliseconds; - private int periodicSchedulerCheckInMilliseconds; - private int pollingTimeoutForCleaningCompletionServiceInMilliseconds; - private int periodOfNoProcessedRecordsChangeInMinutes; - private int threadLimitThrottlingLevelWeak; - private int threadLimitThrottlingLevelMedium; - private int threadLimitThrottlingLevelStrong; - - private String baseUrl; - private int maxServedExecutionListLength; - private int maxDepublishRecordIdsPerDataset; - - private int linkCheckingDefaultSamplingSize; - private int solrCommitPeriodInMinutes; - - private String authenticationBaseUrl; - private String[] allowedCorsHosts; - - - public int getMaxConcurrentThreads() { - return maxConcurrentThreads; - } - - public void setMaxConcurrentThreads(int maxConcurrentThreads) { - this.maxConcurrentThreads = maxConcurrentThreads; - } - - public int getDpsMonitorCheckIntervalInSeconds() { - return dpsMonitorCheckIntervalInSeconds; - } - - public void setDpsMonitorCheckIntervalInSeconds(int dpsMonitorCheckIntervalInSeconds) { - this.dpsMonitorCheckIntervalInSeconds = dpsMonitorCheckIntervalInSeconds; - } - - public int getDpsConnectTimeoutInMilliseconds() { - return dpsConnectTimeoutInMilliseconds; - } - - public void setDpsConnectTimeoutInMilliseconds(int dpsConnectTimeoutInMilliseconds) { - this.dpsConnectTimeoutInMilliseconds = dpsConnectTimeoutInMilliseconds; - } - - public int getDpsReadTimeoutInMilliseconds() { - return dpsReadTimeoutInMilliseconds; - } - - public void setDpsReadTimeoutInMilliseconds(int dpsReadTimeoutInMilliseconds) { - this.dpsReadTimeoutInMilliseconds = dpsReadTimeoutInMilliseconds; - } - - public 
int getFailsafeMarginOfInactivityInSeconds() { - return failsafeMarginOfInactivityInSeconds; - } - - public void setFailsafeMarginOfInactivityInSeconds(int failsafeMarginOfInactivityInSeconds) { - this.failsafeMarginOfInactivityInSeconds = failsafeMarginOfInactivityInSeconds; - } - - public int getPeriodicFailsafeCheckInMilliseconds() { - return periodicFailsafeCheckInMilliseconds; - } - - public void setPeriodicFailsafeCheckInMilliseconds(int periodicFailsafeCheckInMilliseconds) { - this.periodicFailsafeCheckInMilliseconds = periodicFailsafeCheckInMilliseconds; - } - - public int getPeriodicSchedulerCheckInMilliseconds() { - return periodicSchedulerCheckInMilliseconds; - } - - public void setPeriodicSchedulerCheckInMilliseconds(int periodicSchedulerCheckInMilliseconds) { - this.periodicSchedulerCheckInMilliseconds = periodicSchedulerCheckInMilliseconds; - } - - public int getPollingTimeoutForCleaningCompletionServiceInMilliseconds() { - return pollingTimeoutForCleaningCompletionServiceInMilliseconds; - } - - public void setPollingTimeoutForCleaningCompletionServiceInMilliseconds( - int pollingTimeoutForCleaningCompletionServiceInMilliseconds) { - this.pollingTimeoutForCleaningCompletionServiceInMilliseconds = pollingTimeoutForCleaningCompletionServiceInMilliseconds; - } - - public int getPeriodOfNoProcessedRecordsChangeInMinutes() { - return periodOfNoProcessedRecordsChangeInMinutes; - } - - public void setPeriodOfNoProcessedRecordsChangeInMinutes(int periodOfNoProcessedRecordsChangeInMinutes) { - this.periodOfNoProcessedRecordsChangeInMinutes = periodOfNoProcessedRecordsChangeInMinutes; - } - - public int getThreadLimitThrottlingLevelWeak() { - return threadLimitThrottlingLevelWeak; - } - - public void setThreadLimitThrottlingLevelWeak(int threadLimitThrottlingLevelWeak) { - this.threadLimitThrottlingLevelWeak = threadLimitThrottlingLevelWeak; - } - - public int getThreadLimitThrottlingLevelMedium() { - return threadLimitThrottlingLevelMedium; - } - - public void 
setThreadLimitThrottlingLevelMedium(int threadLimitThrottlingLevelMedium) { - this.threadLimitThrottlingLevelMedium = threadLimitThrottlingLevelMedium; - } - - public int getThreadLimitThrottlingLevelStrong() { - return threadLimitThrottlingLevelStrong; - } - - public void setThreadLimitThrottlingLevelStrong(int threadLimitThrottlingLevelStrong) { - this.threadLimitThrottlingLevelStrong = threadLimitThrottlingLevelStrong; - } - - public String getBaseUrl() { - return baseUrl; - } - - public void setBaseUrl(String baseUrl) { - this.baseUrl = baseUrl; - } - - public int getMaxServedExecutionListLength() { - return maxServedExecutionListLength; - } - - public void setMaxServedExecutionListLength(int maxServedExecutionListLength) { - this.maxServedExecutionListLength = maxServedExecutionListLength; - } - - public int getMaxDepublishRecordIdsPerDataset() { - return maxDepublishRecordIdsPerDataset; - } - - public void setMaxDepublishRecordIdsPerDataset(int maxDepublishRecordIdsPerDataset) { - this.maxDepublishRecordIdsPerDataset = maxDepublishRecordIdsPerDataset; - } - - public int getLinkCheckingDefaultSamplingSize() { - return linkCheckingDefaultSamplingSize; - } - - public void setLinkCheckingDefaultSamplingSize(int linkCheckingDefaultSamplingSize) { - this.linkCheckingDefaultSamplingSize = linkCheckingDefaultSamplingSize; - } - - public int getSolrCommitPeriodInMinutes() { - return solrCommitPeriodInMinutes; - } - - public void setSolrCommitPeriodInMinutes(int solrCommitPeriodInMinutes) { - this.solrCommitPeriodInMinutes = solrCommitPeriodInMinutes; - } - - public String getAuthenticationBaseUrl() { - return authenticationBaseUrl; - } - - public void setAuthenticationBaseUrl(String authenticationBaseUrl) { - this.authenticationBaseUrl = authenticationBaseUrl; - } - - public String[] getAllowedCorsHosts() { - return allowedCorsHosts == null ? 
null : allowedCorsHosts.clone(); - } - - public void setAllowedCorsHosts(String[] allowedCorsHosts) { - this.allowedCorsHosts = allowedCorsHosts == null ? null : allowedCorsHosts.clone(); - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DatasetController.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DatasetController.java deleted file mode 100644 index 48b79d29e4..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DatasetController.java +++ /dev/null @@ -1,695 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; - -import com.fasterxml.jackson.annotation.JsonProperty; -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetSearchView; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.dataset.DatasetXsltStringWrapper; -import eu.europeana.metis.core.exceptions.DatasetAlreadyExistsException; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoXsltFoundException; -import eu.europeana.metis.core.exceptions.XsltSetupException; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.service.DatasetService; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.TransformationPlugin; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import 
eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; - -/** - * Contains all the calls that are related to Datasets. - *

The {@link DatasetService} has control on how to manipulate a dataset

- */ -@RestController -public class DatasetController { - - private static final Logger LOGGER = LoggerFactory.getLogger(DatasetController.class); - - private final DatasetService datasetService; - private final AuthenticationClient authenticationClient; - - /** - * Autowired constructor with all required parameters. - * - * @param datasetService the datasetService - * @param authenticationClient the java client to communicate with the external authentication service - */ - @Autowired - public DatasetController(DatasetService datasetService, - AuthenticationClient authenticationClient) { - this.datasetService = datasetService; - this.authenticationClient = authenticationClient; - } - - /** - * Create a provided dataset. - *

Dataset is provided as json or xml.

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param dataset the provided dataset to be created - * @return the dataset created including all other fields that are auto generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link DatasetAlreadyExistsException} if the dataset already exists for the organizationId and datasetName.
  • - *
  • {@link UserUnauthorizedException} if the authorization header is un-parsable or the user cannot be authenticated or authorized or the user is unauthorized.
  • - *
- */ - @PostMapping(value = RestEndpoints.DATASETS, consumes = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public Dataset createDataset(@RequestHeader("Authorization") String authorization, - @RequestBody Dataset dataset) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - Dataset createdDataset = datasetService.createDataset(metisUserView, dataset); - LOGGER.info("Dataset with datasetId: {}, datasetName: {} and organizationId {} created", - createdDataset.getDatasetId(), createdDataset.getDatasetName(), - createdDataset.getOrganizationId()); - return createdDataset; - } - - /** - * Update a provided dataset including an xslt string. - *

- * Non allowed fields, to be manually updated, will be ignored. Updating a dataset with a new xslt will only overwrite the - * {@code Dataset#xsltId} and a new {@link DatasetXslt} object will be stored. The older {@link DatasetXslt} will still be - * accessible. - *

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetXsltStringWrapper {@link DatasetXsltStringWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset was not found for the datasetId.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
  • {@link DatasetAlreadyExistsException} if a datasetName change is requested and the datasetName for that organizationId already exists.
  • - *
- */ - @PutMapping(value = RestEndpoints.DATASETS, consumes = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void updateDataset(@RequestHeader("Authorization") String authorization, - @RequestBody DatasetXsltStringWrapper datasetXsltStringWrapper) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - datasetService - .updateDataset(metisUserView, datasetXsltStringWrapper.getDataset(), datasetXsltStringWrapper - .getXslt()); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Dataset with datasetId {} updated", - CRLF_PATTERN.matcher(datasetXsltStringWrapper.getDataset().getDatasetId()).replaceAll("")); - } - } - - /** - * Delete a dataset using a datasetId. - * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetId the identifier used to find and delete the dataset - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found for datasetId
  • - *
- */ - @DeleteMapping(value = RestEndpoints.DATASETS_DATASETID) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void deleteDataset(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - datasetService.deleteDatasetByDatasetId(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Dataset with datasetId '{}' deleted", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - } - - /** - * Get a dataset based on its datasetId - * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetId the identifier used to find a dataset - * @return {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public Dataset getByDatasetId(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - Dataset storedDataset = datasetService.getDatasetByDatasetId(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Dataset with datasetId '{}' found", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - return storedDataset; - } - - /** - * Get the xslt object containing the escaped xslt string using a dataset identifier. - * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetId the identifier used to find a dataset - * @return the {@link DatasetXslt} object containing the xslt as an escaped string - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_DATASETID_XSLT, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public DatasetXslt getDatasetXsltByDatasetId(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - DatasetXslt datasetXslt = datasetService.getDatasetXsltByDatasetId(metisUserView, datasetId); - LOGGER.info("Dataset XSLT with datasetId '{}' and xsltId: '{}' found", datasetId, - datasetXslt.getId()); - return datasetXslt; - } - - /** - * Get the xslt string as non escaped text using an xslt identifier. - *

- * It is a method that does not require authentication and it is meant to be used from external service to download the - * corresponding xslt. At the point of writing, ECloud transformation topology is using it. {@link TransformationPlugin} - *

- * - * @param xsltId the xslt identifier - * @return the text non escaped representation of the xslt string - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_XSLT_XSLTID, produces = { - MediaType.TEXT_PLAIN_VALUE}) - @ResponseStatus(HttpStatus.OK) - public String getXsltByXsltId(@PathVariable("xsltId") String xsltId) - throws GenericMetisException { - DatasetXslt datasetXslt = datasetService.getDatasetXsltByXsltId(xsltId); - LOGGER.info("XSLT with xsltId '{}' found", datasetXslt.getId()); - return datasetXslt.getXslt(); - } - - /** - * Create a new default xslt in the database. - *

- * Each dataset can have it's own custom xslt but a default xslt should always be available. Creating a new default xslt will - * create a new {@link DatasetXslt} object and the older one will still be available. The created {@link DatasetXslt} will have - * it's {@code DatasetXslt#datasetId} as -1 to indicate that it is not related to a specific dataset. - *

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param xsltString the text of the String representation non escaped - * @return the created {@link DatasetXslt} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @PostMapping(value = RestEndpoints.DATASETS_XSLT_DEFAULT, consumes = { - MediaType.TEXT_PLAIN_VALUE}, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public DatasetXslt createDefaultXslt(@RequestHeader("Authorization") String authorization, - @RequestBody String xsltString) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - DatasetXslt defaultDatasetXslt = datasetService.createDefaultXslt(metisUserView, xsltString); - LOGGER.info("New default xslt created with xsltId: {}", defaultDatasetXslt.getId()); - return defaultDatasetXslt; - } - - /** - * Get the latest created default xslt. - *

- * It is an method that does not require authentication and it is meant to be used from external service to download the - * corresponding xslt. At the point of writing, ECloud transformation topology is using it. {@link TransformationPlugin} - *

- * - * @return the text representation of the String xslt non escaped - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_XSLT_DEFAULT, produces = { - MediaType.TEXT_PLAIN_VALUE}) - @ResponseStatus(HttpStatus.OK) - public String getLatestDefaultXslt() throws GenericMetisException { - DatasetXslt datasetXslt = datasetService.getLatestDefaultXslt(); - LOGGER.info("Default XSLT with xsltId '{}' found", datasetXslt.getId()); - return datasetXslt.getXslt(); - } - - /** - * Transform a list of xmls using the latest dataset xslt stored. - *

- * This method is meant to be used after a response from - * {@link ProxiesController#getListOfFileContentsFromPluginExecution(String, String, ExecutablePluginType, String)} to try a - * transformation on a list of xmls just after validation external to preview an example result. - *

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetId the dataset identifier, it is required for authentication and for the dataset fields xslt injection - * @param records the list of {@link Record} that contain the xml fields {@code Record#xmlRecord}. - * @return a list of {@link Record}s with the field {@code Record#xmlRecord} containing the transformed xml - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the authorization header is un-parsable or the user cannot be - * authenticated or authorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link NoXsltFoundException} if there is no xslt found
  • - *
  • {@link XsltSetupException} if the XSL transform could not be set up
  • - *
- */ - @PostMapping(value = RestEndpoints.DATASETS_DATASETID_XSLT_TRANSFORM, consumes = { - MediaType.APPLICATION_JSON_VALUE}, produces = {MediaType.APPLICATION_JSON_VALUE}) - @ResponseStatus(HttpStatus.OK) - public List transformRecordsUsingLatestDatasetXslt( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestBody List records) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return datasetService.transformRecordsUsingLatestDatasetXslt(metisUserView, datasetId, records); - } - - /** - * Transform a list of xmls using the latest default xslt stored. - *

- * This method is meant to be used after a response from - * {@link ProxiesController#getListOfFileContentsFromPluginExecution(String, String, ExecutablePluginType, String)} to try a - * transformation on a list of xmls just after validation external to preview an example result. - *

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetId the dataset identifier, it is required for authentication and for the dataset fields xslt injection - * @param records the list of {@link Record} that contain the xml fields {@code Record#xmlRecord}. - * @return a list of {@link Record}s with the field {@code Record#xmlRecord} containing the transformed xml - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the authorization header is un-parsable or the user cannot be - * authenticated or authorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link NoXsltFoundException} if there is no xslt found
  • - *
  • {@link XsltSetupException} if the XSL transform could not be set up
  • - *
- */ - @PostMapping(value = RestEndpoints.DATASETS_DATASETID_XSLT_TRANSFORM_DEFAULT, consumes = { - MediaType.APPLICATION_JSON_VALUE}, produces = {MediaType.APPLICATION_JSON_VALUE}) - @ResponseStatus(HttpStatus.OK) - public List transformRecordsUsingLatestDefaultXslt( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestBody List records) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return datasetService.transformRecordsUsingLatestDefaultXslt(metisUserView, datasetId, records); - } - - /** - * Get a dataset based on its datasetName - * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param datasetName the name of the dataset used to find a dataset - * @return {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_DATASETNAME, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public Dataset getByDatasetName(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetName") String datasetName) - throws GenericMetisException { - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - Dataset dataset = datasetService.getDatasetByDatasetName(metisUserView, datasetName); - LOGGER.info("Dataset with datasetName '{}' found", dataset.getDatasetName()); - return dataset; - } - - /** - * Get a list of all the datasets using the provider field for lookup. - *

The results are paged and wrapped around {@link ResponseListWrapper}

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param provider the provider used to search - * @param nextPage the nextPage number or -1 - * @return {@link ResponseListWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_PROVIDER, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllDatasetsByProvider( - @RequestHeader("Authorization") String authorization, - @PathVariable("provider") String provider, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper - .setResultsAndLastPage( - datasetService.getAllDatasetsByProvider(metisUserView, provider, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - /** - * Get a list of all the datasets using the intermediateProvider field for lookup. - *

The results are paged and wrapped around {@link ResponseListWrapper}

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param intermediateProvider the intermediateProvider used to search - * @param nextPage the nextPage number or -1 - * @return {@link ResponseListWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_INTERMEDIATE_PROVIDER, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllDatasetsByIntermediateProvider( - @RequestHeader("Authorization") String authorization, - @PathVariable("intermediateProvider") String intermediateProvider, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper - .setResultsAndLastPage( - datasetService - .getAllDatasetsByIntermediateProvider(metisUserView, intermediateProvider, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - /** - * Get a list of all the datasets using the dataProvider field for lookup. - *

The results are paged and wrapped around {@link ResponseListWrapper}

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param dataProvider the dataProvider used to search - * @param nextPage the nextPage number or -1 - * @return {@link ResponseListWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_DATAPROVIDER, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllDatasetsByDataProvider( - @RequestHeader("Authorization") String authorization, - @PathVariable("dataProvider") String dataProvider, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper - .setResultsAndLastPage( - datasetService.getAllDatasetsByDataProvider(metisUserView, dataProvider, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - /** - * Get a list of all the datasets using the organizationId field for lookup. - *

The results are paged and wrapped around {@link ResponseListWrapper}

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param organizationId the organizationId used to search - * @param nextPage the nextPage number or -1 - * @return {@link ResponseListWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_ORGANIZATION_ID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllDatasetsByOrganizationId( - @RequestHeader("Authorization") String authorization, - @PathVariable("organizationId") String organizationId, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper - .setResultsAndLastPage( - datasetService.getAllDatasetsByOrganizationId(metisUserView, organizationId, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - /** - * Get a list of all the datasets using the organizationName field for lookup. - *

The results are paged and wrapped around {@link ResponseListWrapper}

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @param organizationName the organizationName used to search - * @param nextPage the nextPage number or -1 - * @return {@link ResponseListWrapper} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_ORGANIZATION_NAME, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllDatasetsByOrganizationName( - @RequestHeader("Authorization") String authorization, - @PathVariable("organizationName") String organizationName, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper - .setResultsAndLastPage( - datasetService.getAllDatasetsByOrganizationName(metisUserView, organizationName, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - /** - * Get all available countries that can be used. - *

The list is retrieved based on an internal enum

- * - * @param authorization the String provided by an HTTP Authorization header

The expected input should follow the rule Bearer - * accessTokenHere

- * @return The list of countries that are serialized based on {@link eu.europeana.metis.core.common.CountrySerializer} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_COUNTRIES, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public List getDatasetsCountries( - @RequestHeader("Authorization") String authorization) throws GenericMetisException { - authenticationClient.getUserByAccessTokenInHeader(authorization); - return Country.getCountryListSortedByName().stream().map(CountryView::new) - .toList(); - } - - /** - * Get all available languages that can be used. - *

The list is retrieved based on an internal enum

- * - * @param authorization the String provided by an HTTP Authorization header - *

The expected input should follow the rule Bearer accessTokenHere

- * @return The list of countries that are serialized based on {@link eu.europeana.metis.core.common.LanguageSerializer} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_LANGUAGES, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public List getDatasetsLanguages( - @RequestHeader("Authorization") String authorization) throws GenericMetisException { - authenticationClient.getUserByAccessTokenInHeader(authorization); - return Language.getLanguageListSortedByName().stream().map(LanguageView::new) - .toList(); - } - - /** - * Get the list of of matching DatasetSearch using dataset - * - * @param authorization the String provided by an HTTP Authorization header - *

The expected input should follow the rule Bearer accessTokenHere

- * @param searchString a string that may contain multiple words separated by spaces. - *

The search will be performed on the fields datasetId, datasetName, provider, dataProvider. - * The words that start with a numeric character will be considered as part of the datasetId search and that field is searched - * as a "starts with" operation. All words that from a certain length threshold and above e.g. 3 will be used, as AND - * operations, for searching the fields datasetName, provider, dataProvider

- * @param nextPage the nextPage number, must be positive - * @return a list with the dataset search view results - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if the parameters provided are invalid.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - @GetMapping(value = RestEndpoints.DATASETS_SEARCH, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getDatasetSearch( - @RequestHeader("Authorization") String authorization, - @RequestParam(value = "searchString") String searchString, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - - final MetisUserView metisUserView = authenticationClient - .getUserByAccessTokenInHeader(authorization); - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - responseListWrapper.setResultsAndLastPage( - datasetService.searchDatasetsBasedOnSearchString(metisUserView, searchString, nextPage), - datasetService.getDatasetsPerRequestLimit(), nextPage); - LOGGER.info(CommonStringValues.BATCH_OF_DATASETS_RETURNED, responseListWrapper.getListSize(), - nextPage); - return responseListWrapper; - } - - private static class CountryView { - - @JsonProperty("enum") - private final String enumName; - @JsonProperty - private final String name; - @JsonProperty - private final String isoCode; - - CountryView(Country country) { - this.enumName = country.name(); - this.name = country.getName(); - this.isoCode = country.getIsoCode(); - } - } - - private static class LanguageView { - - @JsonProperty("enum") - private final String enumName; - @JsonProperty - private final String name; - - LanguageView(Language language) { - this.enumName = language.name(); - this.name = language.getName(); - } - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DepublishRecordIdController.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DepublishRecordIdController.java deleted file mode 100644 index 9c652ab1ba..0000000000 --- 
a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/DepublishRecordIdController.java +++ /dev/null @@ -1,226 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.rest.DepublicationInfoView; -import eu.europeana.metis.core.service.DepublishRecordIdService; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; -import 
org.springframework.web.multipart.MultipartFile; - -/** - * Controller for calls related to depublish record ids. - */ -@RestController -public class DepublishRecordIdController { - - private static final Logger LOGGER = LoggerFactory.getLogger(DepublishRecordIdController.class); - private static final Pattern CRLF_PATTERN = Pattern - .compile(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX); - - private final DepublishRecordIdService depublishRecordIdService; - private final AuthenticationClient authenticationClient; - - /** - * Autowired constructor with all required parameters. - * - * @param depublishRecordIdService the service for depublished records. - * @param authenticationClient the java client to communicate with the external authentication service - */ - @Autowired - public DepublishRecordIdController(DepublishRecordIdService depublishRecordIdService, - AuthenticationClient authenticationClient) { - this.depublishRecordIdService = depublishRecordIdService; - this.authenticationClient = authenticationClient; - } - - /** - * Adds a list of record ids to be depublished for the dataset - the version for a simple text body. - * - * @param authorization the HTTP Authorization header, in the form of a Bearer Access Token. - * @param datasetId The dataset ID to which the depublish record ids belong. - * @param recordIdsInSeparateLines The string containing the record IDs in separate lines. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content or the operation were invalid
  • - *
- */ - @PostMapping(value = RestEndpoints.DEPUBLISH_RECORDIDS_DATASETID, consumes = { - MediaType.TEXT_PLAIN_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public void createRecordIdsToBeDepublished(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, @RequestBody String recordIdsInSeparateLines - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final int added = depublishRecordIdService - .addRecordIdsToBeDepublished(metisUserView, datasetId, recordIdsInSeparateLines); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("{} Depublish record ids added to dataset with datasetId: {}", added, - CRLF_PATTERN.matcher(datasetId).replaceAll("")); - } - } - - /** - * Adds a list of record ids to be depublished for the dataset - the version for a multipart file. - * - * @param authorization the HTTP Authorization header, in the form of a Bearer Access Token. - * @param datasetId The dataset ID to which the depublish record ids belong. - * @param recordIdsFile The file containing the record IDs in separate lines. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content or the operation were invalid
  • - *
- * @throws IOException In case something unexpected went wrong reading the request body. - */ - @PostMapping(value = RestEndpoints.DEPUBLISH_RECORDIDS_DATASETID, consumes = { - MediaType.MULTIPART_FORM_DATA_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public void createRecordIdsToBeDepublished(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestPart("depublicationFile") MultipartFile recordIdsFile - ) throws GenericMetisException, IOException { - createRecordIdsToBeDepublished(authorization, datasetId, - new String(recordIdsFile.getBytes(), StandardCharsets.UTF_8)); - } - - /** - * Deletes a list of record ids from the database. Only record ids that are in a - * {@link eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus#PENDING_DEPUBLICATION} state will be removed. - * - * @param authorization the HTTP Authorization header, in the form of a Bearer Access Token. - * @param datasetId The dataset ID to which the depublish record ids belong. - * @param recordIdsInSeparateLines The string containing the record IDs in separate lines. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content or the operation were invalid
  • - *
- */ - @DeleteMapping(value = RestEndpoints.DEPUBLISH_RECORDIDS_DATASETID, consumes = { - MediaType.TEXT_PLAIN_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void deletePendingRecordIds(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, @RequestBody String recordIdsInSeparateLines - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final Long removedRecordIds = depublishRecordIdService - .deletePendingRecordIds(metisUserView, datasetId, recordIdsInSeparateLines); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("{} Depublish record ids removed from database with datasetId: {}", - removedRecordIds, CRLF_PATTERN.matcher(datasetId).replaceAll("")); - } - } - - /** - * Retrieve the list of depublish record ids for a specific dataset. - * - * @param authorization the HTTP Authorization header, in the form of a Bearer Access Token. - * @param datasetId The ID of the dataset for which to retrieve the records. - * @param page The page to retrieve. - * @param sortField The field on which to sort. - * @param sortAscending The direction in which to sort. - * @param searchQuery Search query for the record ID. - * @return The list of records along with some other information regarding the depublication. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - @GetMapping(value = RestEndpoints.DEPUBLISH_RECORDIDS_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public DepublicationInfoView getDepublishRecordIds( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "page", defaultValue = "0") int page, - @RequestParam(value = "sortField", required = false) DepublishRecordIdSortField sortField, - @RequestParam(value = "sortAscending", defaultValue = "" + true) boolean sortAscending, - @RequestParam(value = "searchQuery", required = false) String searchQuery - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final var recordIds = depublishRecordIdService.getDepublishRecordIds(metisUserView, datasetId, page, - sortField == null ? DepublishRecordIdSortField.RECORD_ID : sortField, - sortAscending ? SortDirection.ASCENDING : SortDirection.DESCENDING, searchQuery); - final var canDepublish = depublishRecordIdService.canTriggerDepublication(metisUserView, datasetId); - return new DepublicationInfoView(recordIds, canDepublish); - } - - /** - * Does checking, prepares and adds a WorkflowExecution with a single Depublish step in the queue. That means it updates the - * status of the WorkflowExecution to {@link eu.europeana.metis.core.workflow.WorkflowStatus#INQUEUE}, adds it to the database - * and also it's identifier goes into the distributed queue of WorkflowExecutions. 
- * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier for which the execution will take place - * @param datasetDepublish true for dataset depublication, false for record depublication - * @param priority the priority of the execution in case the system gets overloaded, 0 lowest, 10 highest - * @param recordIdsInSeparateLines the specific pending record ids to depublish. Only record ids that are marked as - * {@link eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus#PENDING_DEPUBLICATION} in the database will be - * attempted for depublication. - * @return the WorkflowExecution object that was generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if the workflow is empty or no plugin enabled
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if there was an exception when - * contacting the external resource(ECloud)
  • - *
  • {@link eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed} if the execution of - * the first plugin was not allowed, because a valid source plugin could not be found
  • - *
  • {@link eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException} if a - * workflow execution for the generated execution identifier already exists, almost impossible to - * happen since ids are UUIDs
  • - *
- */ - @PostMapping(value = RestEndpoints.DEPUBLISH_EXECUTE_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - @ResponseBody - public WorkflowExecution addDepublishWorkflowInQueueOfWorkflowExecutions( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "datasetDepublish", defaultValue = "" + true) boolean datasetDepublish, - @RequestParam(value = "priority", defaultValue = "0") int priority, - @RequestBody(required = false) String recordIdsInSeparateLines) - throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return depublishRecordIdService - .createAndAddInQueueDepublishWorkflowExecution(metisUserView, datasetId, - datasetDepublish, priority, recordIdsInSeparateLines); - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/OrchestratorController.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/OrchestratorController.java deleted file mode 100644 index 53a5a302a9..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/OrchestratorController.java +++ /dev/null @@ -1,616 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.rest.ExecutionHistory; -import eu.europeana.metis.core.rest.IncrementalHarvestingAllowedView; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.rest.VersionEvolution; -import 
eu.europeana.metis.core.rest.execution.details.WorkflowExecutionView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionAndDatasetView; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.util.Date; -import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.format.annotation.DateTimeFormat; -import org.springframework.format.annotation.DateTimeFormat.ISO; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; - -/** - * Contains all the calls that are related to Orchestration. - *

The {@link OrchestratorService} has control on how to orchestrate different components of the - * system

- */ -@RestController -public class OrchestratorController { - - private static final Logger LOGGER = LoggerFactory.getLogger(OrchestratorController.class); - private final OrchestratorService orchestratorService; - private final AuthenticationClient authenticationClient; - - /** - * Autowired constructor with all required parameters. - * - * @param orchestratorService the orchestratorService object - * @param authenticationClient the client for the authentication service - */ - @Autowired - public OrchestratorController(OrchestratorService orchestratorService, - AuthenticationClient authenticationClient) { - this.orchestratorService = orchestratorService; - this.authenticationClient = authenticationClient; - } - - /** - * Create a workflow using a datasetId and the {@link Workflow} that contains the requested plugins. If plugins are disabled, - * they (their settings) are still saved. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier to relate the workflow to - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @param workflow the Workflow will all it's requested plugins - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.WorkflowAlreadyExistsException} if a workflow - * for the dataset identifier provided already exists
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - //WORKFLOWS - @PostMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, consumes = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public void createWorkflow( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "enforcedPluginType", required = false, defaultValue = "") ExecutablePluginType enforcedPredecessorType, - @RequestBody Workflow workflow) - throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - orchestratorService.createWorkflow(metisUserView, datasetId, workflow, enforcedPredecessorType); - } - - /** - * Update an already existent workflow using a datasetId and the {@link Workflow} that contains the requested plugins. If - * plugins are disabled, they (their settings) are still saved. Any settings in plugins that are not sent in the request are - * removed. - * - * @param authorization the authorization header with the access token - * @param datasetId the identifier of the dataset for which the workflow should be updated - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @param workflow the workflow with the plugins requested - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowFoundException} if a workflow for the - * dataset identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @PutMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void updateWorkflow( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "enforcedPluginType", required = false, defaultValue = "") ExecutablePluginType enforcedPredecessorType, - @RequestBody Workflow workflow) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - orchestratorService.updateWorkflow(metisUserView, datasetId, workflow, enforcedPredecessorType); - } - - /** - * Deletes a workflow. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier that corresponds to the workflow to be deleted - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @DeleteMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void deleteWorkflow( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - orchestratorService.deleteWorkflow(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Workflow with datasetId '{}' deleted", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - } - - /** - * Get a workflow for a dataset identifier. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier - * @return the Workflow object - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public Workflow getWorkflow( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - Workflow workflow = orchestratorService.getWorkflow(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Workflow with datasetId '{}' found", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - return workflow; - } - - //WORKFLOW EXECUTIONS - - /** - * Does checking, prepares and adds a WorkflowExecution in the queue. That means it updates the status of the WorkflowExecution - * to {@link WorkflowStatus#INQUEUE}, adds it to the database and also it's identifier goes into the distributed queue of - * WorkflowExecutions. The source data for the first plugin in the workflow can be controlled, if required, from the - * {@code enforcedPredecessorType}, which means that the last valid plugin that is provided with that parameter, will be used as - * the source data. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier for which the execution will take place - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @param priority the priority of the execution in case the system gets overloaded, 0 lowest, 10 highest - * @return the WorkflowExecution object that was generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowFoundException} if a workflow for the - * dataset identifier provided does not exist
  • - *
  • {@link BadContentException} if the workflow is empty or no plugin enabled
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if there was an exception when - * contacting the external resource(ECloud)
  • - *
  • {@link eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed} if the execution of - * the first plugin was not allowed, because a valid source plugin could not be found
  • - *
  • {@link eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException} if a - * workflow execution for the generated execution identifier already exists, almost impossible to - * happen since ids are UUIDs
  • - *
- */ - @PostMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public WorkflowExecution addWorkflowInQueueOfWorkflowExecutions( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "enforcedPluginType", required = false, defaultValue = "") ExecutablePluginType enforcedPredecessorType, - @RequestParam(value = "priority", defaultValue = "0") int priority) - throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - WorkflowExecution workflowExecution = orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, datasetId, null, enforcedPredecessorType, - priority); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("WorkflowExecution for datasetId '{}' added to queue", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - return workflowExecution; - } - - /** - * Request to cancel a workflow execution. The execution will go into a cancelling state until it's properly - * {@link WorkflowStatus#CANCELLED} from the system - * - * @param authorization the authorization header with the access token - * @param executionId the execution identifier of the execution to cancel - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * worklfowExecution could be found
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier of the workflow does not exist
  • - *
- */ - @DeleteMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void cancelWorkflowExecution( - @RequestHeader("Authorization") String authorization, - @PathVariable("executionId") String executionId) - throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - orchestratorService.cancelWorkflowExecution(metisUserView, executionId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("WorkflowExecution for executionId '{}' is cancelling", - executionId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - } - - /** - * Get a WorkflowExecution using an execution identifier. - * - * @param authorization the authorization header with the access token - * @param executionId the execution identifier - * @return the WorkflowExecution object - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public WorkflowExecution getWorkflowExecutionByExecutionId( - @RequestHeader("Authorization") String authorization, - @PathVariable("executionId") String executionId) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - WorkflowExecution workflowExecution = orchestratorService - .getWorkflowExecutionByExecutionId(metisUserView, executionId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("WorkflowExecution with executionId '{}' {}found.", - executionId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - workflowExecution == null ? "not " : ""); - } - return workflowExecution; - } - - /** - * This method returns whether currently it is permitted/possible to perform incremental harvesting for the given dataset. - * - * @param authorization the authorization header with the access token - * @param datasetId The ID of the dataset for which to check. - * @return Whether we can perform incremental harvesting for the dataset. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this task
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_ALLOWED_INCREMENTAL, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public IncrementalHarvestingAllowedView isIncrementalHarvestingAllowed( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return new IncrementalHarvestingAllowedView( - orchestratorService.isIncrementalHarvestingAllowed(metisUserView, datasetId)); - } - - /** - * Check if a specified {@code pluginType} is allowed for execution. This is checked based on, if there was a previous - * successful finished plugin that follows a specific order (unless the {@code enforcedPredecessorType} is used) and that has - * the latest successful harvest plugin as an ancestor. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier of which the executions are based on - * @param pluginType the pluginType to be checked for allowance of execution - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @return the abstractMetisPlugin that the execution on {@code pluginType} will be based on. Can be null if the - * {@code pluginType} is the first one in the total order of executions e.g. One of the harvesting plugins. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed} if the no plugin was - * found so the {@code pluginType} will be based upon.
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_ALLOWED_PLUGIN, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public MetisPlugin getLatestFinishedPluginWorkflowExecutionByDatasetIdIfPluginTypeAllowedForExecution( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam("pluginType") ExecutablePluginType pluginType, - @RequestParam(value = "enforcedPluginType", required = false, defaultValue = "") ExecutablePluginType enforcedPredecessorType) - throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - MetisPlugin latestFinishedPluginWorkflowExecutionByDatasetId = orchestratorService - .getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution(metisUserView, datasetId, - pluginType, enforcedPredecessorType); - if (latestFinishedPluginWorkflowExecutionByDatasetId == null) { - LOGGER.info("PluginType allowed by default"); - } else { - LOGGER.info("Latest Plugin WorkflowExecution with id '{}' found", - latestFinishedPluginWorkflowExecutionByDatasetId.getId()); - } - return latestFinishedPluginWorkflowExecutionByDatasetId; - } - - /** - * Retrieve dataset level information of past executions {@link DatasetExecutionInformation} - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier to generate the information for - * @return the structured class containing all the execution information - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_INFORMATION, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public DatasetExecutionInformation getDatasetExecutionInformation( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.debug("Requesting dataset execution information for datasetId: {}", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return orchestratorService.getDatasetExecutionInformation(metisUserView, datasetId); - } - - /** - * Get all WorkflowExecutions paged. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier filter - * @param workflowStatuses a set of workflow statuses to filter, can be empty or null - * @param orderField the field to be used to sort the results - * @param ascending a boolean value to request the ordering to ascending or descending - * @param nextPage the nextPage token, the end of the list is marked with -1 on the response - * @return a list of all the WorkflowExecutions found - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if paging is not correctly provided
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllWorkflowExecutionsByDatasetId( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId, - @RequestParam(value = "workflowStatus", required = false) Set workflowStatuses, - @RequestParam(value = "orderField", required = false, defaultValue = "ID") DaoFieldNames orderField, - @RequestParam(value = "ascending", required = false, defaultValue = "true") boolean ascending, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final ResponseListWrapper result = - orchestratorService.getAllWorkflowExecutions(metisUserView, datasetId, workflowStatuses, - orderField, ascending, nextPage); - logPaging(result, nextPage); - return result; - } - - /** - * Get all WorkflowExecutions paged. Not filtered by datasetId. - *

- * TODO JV This endpoint is no longer in use. Consider removing it. - * - * @param authorization the authorization header with the access token - * @param workflowStatuses a set of workflow statuses to filter, can be empty or null - * @param orderField the field to be used to sort the results - * @param ascending a boolean value to request the ordering to ascending or descending - * @param nextPage the nextPage token, the end of the list is marked with -1 on the response - * @return a list of all the WorkflowExecutions found - * @throws GenericMetisException which can be one of: - *

    - *
  • {@link BadContentException} if paging is not correctly provided
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllWorkflowExecutions( - @RequestHeader("Authorization") String authorization, - @RequestParam(value = "workflowStatus", required = false) Set workflowStatuses, - @RequestParam(value = "orderField", required = false, defaultValue = "ID") DaoFieldNames orderField, - @RequestParam(value = "ascending", required = false, defaultValue = "true") boolean ascending, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final ResponseListWrapper result = - orchestratorService.getAllWorkflowExecutions(metisUserView, null, workflowStatuses, orderField, - ascending, nextPage); - logPaging(result, nextPage); - return result; - } - - /** - * Get the overview of WorkflowExecutions. This returns a list of executions ordered to display an overview. First the ones in - * queue, then those in progress and then those that are finalized. They will be sorted by creation date. This method does - * support pagination. - * - * @param authorization the authorization header with the access token - * @param pluginStatuses the plugin statuses to filter. Can be null. - * @param pluginTypes the plugin types to filter. Can be null. - * @param fromDate the date from where the results should start. Can be null. - * @param toDate the date to where the results should end. Can be null. 
- * @param nextPage the nextPage token, the end of the list is marked with -1 on the response - * @param pageCount the number of pages that is requested - * @return a list of all the WorkflowExecutions together with the datasets that they belong to. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if paging is not correctly provided
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_OVERVIEW, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getWorkflowExecutionsOverview( - @RequestHeader("Authorization") String authorization, - @RequestParam(value = "pluginStatus", required = false) Set pluginStatuses, - @RequestParam(value = "pluginType", required = false) Set pluginTypes, - @RequestParam(value = "fromDate", required = false) @DateTimeFormat(iso = ISO.DATE_TIME) Date fromDate, - @RequestParam(value = "toDate", required = false) @DateTimeFormat(iso = ISO.DATE_TIME) Date toDate, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage, - @RequestParam(value = "pageCount", required = false, defaultValue = "1") int pageCount) - throws GenericMetisException { - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - if (pageCount < 1) { - throw new BadContentException(CommonStringValues.PAGE_COUNT_CANNOT_BE_ZERO_OR_NEGATIVE); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - final ResponseListWrapper result = - orchestratorService.getWorkflowExecutionsOverview(metisUserView, pluginStatuses, pluginTypes, - fromDate, toDate, nextPage, pageCount); - logPaging(result, nextPage); - return result; - } - - private static void logPaging(ResponseListWrapper responseListWrapper, int nextPage) { - LOGGER.debug("Batch of: {} workflowExecutions returned, using batch nextPage: {}", - responseListWrapper.getListSize(), nextPage); - } - - /** - * Retrieve dataset level history of past executions {@link ExecutionHistory} - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier to generate the history for - * @return the structured class containing all the execution history, ordered by date 
descending. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_HISTORY, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ExecutionHistory getDatasetExecutionHistory( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.debug("Requesting dataset execution history for datasetId: {}", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return orchestratorService.getDatasetExecutionHistory(metisUserView, datasetId); - } - - /** - * Retrieve a list of executable plugins with data availability {@link PluginsWithDataAvailability} for a given workflow - * execution. - * - * @param authorization the authorization header with the access token - * @param executionId the identifier of the execution for which to get the plugins - * @return the structured class containing all the execution history, ordered by date descending. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if an - * non-existing execution ID or version is provided.
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID_PLUGINS_DATA_AVAILABILITY, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public PluginsWithDataAvailability getExecutablePluginsWithDataAvailability( - @RequestHeader("Authorization") String authorization, - @PathVariable("executionId") String executionId) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - final String logSanitizedExecutionId = executionId.replaceAll("[\r\n]", ""); - LOGGER.debug("Requesting plugins with data availability for executionId: {}", logSanitizedExecutionId); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return orchestratorService.getExecutablePluginsWithDataAvailability(metisUserView, executionId); - } - - /** - * Get the evolution of the records from when they were first imported until (and excluding) the specified version. - * - * @param authorization The authorization header with the access token - * @param workflowExecutionId The ID of the workflow exection in which the version is created. - * @param pluginType The step within the workflow execution that created the version. - * @return The record evolution. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if an - * non-existing execution ID or version is provided.
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_EVOLUTION, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public VersionEvolution getRecordEvolutionForVersion( - @RequestHeader("Authorization") String authorization, - @PathVariable("workflowExecutionId") String workflowExecutionId, - @PathVariable("pluginType") PluginType pluginType - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return orchestratorService - .getRecordEvolutionForVersion(metisUserView, workflowExecutionId, pluginType); - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ProxiesController.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ProxiesController.java deleted file mode 100644 index 355138edaf..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ProxiesController.java +++ /dev/null @@ -1,376 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.rest.ListOfIds; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.RecordsResponse; -import eu.europeana.metis.core.rest.stats.NodePathStatistics; -import eu.europeana.metis.core.rest.stats.RecordStatistics; -import eu.europeana.metis.core.service.ProxiesService; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.utils.CommonStringValues; -import 
eu.europeana.metis.utils.RestEndpoints; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; - -/** - * Proxies Controller which encapsulates functionality that has to be proxied to an external resource. - */ -@RestController -public class ProxiesController { - - private static final Logger LOGGER = LoggerFactory.getLogger(ProxiesController.class); - private static final int NUMBER_OF_RECORDS = 5; - private final ProxiesService proxiesService; - private final AuthenticationClient authenticationClient; - - /** - * Constructor with required parameters - * - * @param proxiesService {@link ProxiesService} - * @param authenticationClient the client for the authentication service - */ - @Autowired - public ProxiesController(ProxiesService proxiesService, - AuthenticationClient authenticationClient) { - this.proxiesService = proxiesService; - this.authenticationClient = authenticationClient; - } - - /** - * Get logs from a specific topology task paged. 
- * - * @param authorization the authorization header with the access token - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param from integer to start getting logs from - * @param to integer until where logs should be received - * @return the list of logs - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.cloud.service.dps.exception.DpsException} if an error occurred while - * retrieving the logs from the external resource
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_LOGS, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public List getExternalTaskLogs( - @RequestHeader("Authorization") String authorization, - @PathVariable("topologyName") String topologyName, - @PathVariable("externalTaskId") long externalTaskId, - @RequestParam(value = "from") int from, - @RequestParam(value = "to") int to) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.info( - "Requesting proxy call task logs for topologyName: {}, externalTaskId: {}, from: {}, to: {}", - topologyName.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - externalTaskId, from, to); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService.getExternalTaskLogs(metisUserView, topologyName, externalTaskId, from, to); - } - - /** - * Check if final report is available. - * - * @param authorization the authorization header with the access token - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @return true if final report available, false if not or ecloud response {@link jakarta.ws.rs.core.Response.Status)} is not - * OK, based on {@link eu.europeana.cloud.client.dps.rest.DpsClient#checkIfErrorReportExists} - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_REPORT_EXISTS, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public Map existsExternalTaskReport( - @RequestHeader("Authorization") String authorization, - @PathVariable("topologyName") String topologyName, - @PathVariable("externalTaskId") long externalTaskId) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.info( - "Requesting proxy call to check if task report exists for topologyName: {}, externalTaskId: {}", - topologyName.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - externalTaskId); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return Collections.singletonMap("existsExternalTaskReport", - proxiesService.existsExternalTaskReport(metisUserView, topologyName, externalTaskId)); - } - - /** - * Get the final report that includes all the errors grouped. The number of ids per error can be specified through the - * parameters. - * - * @param authorization the authorization header with the access token - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param idsPerError the number of ids that should be displayed per error group - * @return the list of errors grouped - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.cloud.service.dps.exception.DpsException} if an error occurred while - * retrieving the report from the external resource
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_REPORT, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public TaskErrorsInfo getExternalTaskReport( - @RequestHeader("Authorization") String authorization, - @PathVariable("topologyName") String topologyName, - @PathVariable("externalTaskId") long externalTaskId, - @RequestParam("idsPerError") int idsPerError) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Requesting proxy call task reports for topologyName: {}, externalTaskId: {}", - topologyName.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - externalTaskId); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService - .getExternalTaskReport(metisUserView, topologyName, externalTaskId, idsPerError); - } - - /** - * Get the statistics on the given task. - * - * @param authorization the authorization header with the access token - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @return the task statistics - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.cloud.service.dps.exception.DpsException} if an error occurred while - * retrieving the statistics from the external resource
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_STATISTICS, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public RecordStatistics getExternalTaskStatistics( - @RequestHeader("Authorization") String authorization, - @PathVariable("topologyName") String topologyName, - @PathVariable("externalTaskId") long externalTaskId) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Requesting proxy call task statistics for topologyName: {}, externalTaskId: {}", - topologyName.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - externalTaskId); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService.getExternalTaskStatistics(metisUserView, topologyName, externalTaskId); - } - - /** - * Get additional statistics on a node. This method can be used to elaborate on one of the items returned by - * {@link #getExternalTaskStatistics(String, String, long)}. - * - * @param authorization the authorization header with the access token - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param nodePath the path of the node for which this request is made - * @return the list of errors grouped - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.cloud.service.dps.exception.DpsException} if an error occurred while - * retrieving the statistics from the external resource
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_NODE_STATISTICS, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public NodePathStatistics getAdditionalNodeStatistics( - @RequestHeader("Authorization") String authorization, - @PathVariable("topologyName") String topologyName, - @PathVariable("externalTaskId") long externalTaskId, - @RequestParam("nodePath") String nodePath) throws GenericMetisException { - if (LOGGER.isInfoEnabled()) { - LOGGER.info( - "Requesting proxy call additional node statistics for topologyName: {}, externalTaskId: {}", - topologyName.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, ""), - externalTaskId); - } - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService - .getAdditionalNodeStatistics(metisUserView, topologyName, externalTaskId, nodePath); - } - - /** - * Get a list with record contents from the external resource based on an workflow execution and {@link PluginType}. - * - * @param authorization the authorization header with the access token - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link PluginType} that is to be located inside the workflow - * @param nextPage the string representation of the next page which is provided from the response and can be used to get the - * next page of results. - * TODO: The nextPage parameter is currently ignored and we should decide if we would support it again in the future. - * @return the list of records from the external resource - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided identifier
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public RecordsResponse getListOfFileContentsFromPluginExecution( - @RequestHeader("Authorization") String authorization, - @RequestParam("workflowExecutionId") String workflowExecutionId, - @RequestParam("pluginType") ExecutablePluginType pluginType, - @RequestParam(value = "nextPage", required = false) String nextPage - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, workflowExecutionId, pluginType, - StringUtils.isEmpty(nextPage) ? null : nextPage, NUMBER_OF_RECORDS); - } - - /** - * Get a list with record contents from the external resource for a specific list of IDS based on a workflow execution and - * {@link PluginType}. - * - * @param authorization the authorization header with the access token - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param ecloudIds the list of ecloud IDs of the records we wish to obtain - * @return the list of records from the external resource matching the input ID list. If no record with the matching ID was - * found in the given workflow step, no entry for this record will appear in the result list. - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - @PostMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS, - consumes = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public RecordsResponse getListOfFileContentsFromPluginExecution( - @RequestHeader("Authorization") String authorization, - @RequestParam("workflowExecutionId") String workflowExecutionId, - @RequestParam("pluginType") ExecutablePluginType pluginType, - @RequestBody ListOfIds ecloudIds - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService.getListOfFileContentsFromPluginExecution(metisUserView, workflowExecutionId, - pluginType, ecloudIds); - } - - /** - * Get an eCloudId from the external resource for a specific searchId. - * - * @param authorization the authorization header with the access token - * @param workflowExecutionId the execution identifier of the workflow - * @param idToSearch the ID we are searching for and for which we want to find a record - * @return the CloudId from the external resource matching the input ID. If no record with the matching ID was found, it will - * return an empty string. - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - @PostMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_RECORD_SEARCH_BY_ID, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public Record searchRecordByIdFromPluginExecution( - @RequestHeader("Authorization") String authorization, - @RequestParam("workflowExecutionId") String workflowExecutionId, - @RequestParam("pluginType") ExecutablePluginType pluginType, - @RequestParam("idToSearch") String idToSearch - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService.searchRecordByIdFromPluginExecution(metisUserView, workflowExecutionId, pluginType, idToSearch); - } - - /** - * Get a list with record contents from the external resource for a specific list of IDS based on a workflow execution and the - * predecessor of the given {@link PluginType}. - * - * @param authorization the authorization header with the access token - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param ecloudIds the list of ecloud IDs of the records we wish to obtain - * @return the list of records from the external resource matching the input ID list. If no record with the matching ID was - * found in the given workflow step, no entry for this record will appear in the result list. - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - @PostMapping(value = RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_FROM_PREDECESSOR_PLUGIN, - consumes = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}, - produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public RecordsResponse getListOfFileContentsFromPredecessorOfPluginExecution( - @RequestHeader("Authorization") String authorization, - @RequestParam("workflowExecutionId") String workflowExecutionId, - @RequestParam("pluginType") ExecutablePluginType pluginType, - @RequestBody ListOfIds ecloudIds - ) throws GenericMetisException { - final MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - return proxiesService.getListOfFileContentsFromPredecessorPluginExecution(metisUserView, workflowExecutionId, pluginType, - ecloudIds); - } - -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ScheduleWorkflowController.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ScheduleWorkflowController.java deleted file mode 100644 index 07026db579..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/controller/ScheduleWorkflowController.java +++ /dev/null @@ -1,159 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; - -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.ScheduledWorkflowAlreadyExistsException; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.service.ScheduleWorkflowService; -import eu.europeana.metis.core.workflow.ScheduleFrequence; 
-import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; - -/** - * Contains all the calls that are related to scheduling workflows. - *

The {@link ScheduleWorkflowService} has control on how to schedule workflows

- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-05 - */ -@RestController -public class ScheduleWorkflowController { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScheduleWorkflowController.class); - private final ScheduleWorkflowService scheduleWorkflowService; - private final AuthenticationClient authenticationClient; - - public ScheduleWorkflowController(ScheduleWorkflowService scheduleWorkflowService, - AuthenticationClient authenticationClient) { - this.scheduleWorkflowService = scheduleWorkflowService; - this.authenticationClient = authenticationClient; - } - - /** - * Schedules a provided workflow. - * - * @param authorization the authorization header with the access token - * @param scheduledWorkflow the scheduled workflow information - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content send was not acceptable
  • - *
  • {@link NoWorkflowFoundException} if the workflow for a dataset was not found
  • - *
  • {@link ScheduledWorkflowAlreadyExistsException} if a scheduled workflow already exists
  • - *
- */ - @PostMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE, consumes = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.CREATED) - public void scheduleWorkflowExecution(@RequestHeader("Authorization") String authorization, - @RequestBody ScheduledWorkflow scheduledWorkflow) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - scheduleWorkflowService.scheduleWorkflow(metisUserView, scheduledWorkflow); - if (LOGGER.isInfoEnabled()) { - LOGGER.info( - "ScheduledWorkflowExecution for datasetId '{}', pointerDate at '{}', scheduled '{}'", - CRLF_PATTERN.matcher(scheduledWorkflow.getDatasetId()), scheduledWorkflow.getPointerDate(), - CRLF_PATTERN.matcher(scheduledWorkflow.getScheduleFrequence().name()).replaceAll("")); - } - } - - /** - * Get a scheduled workflow based on datasets identifier. - * - * @param authorization the authorization header with the access token - * @param datasetId the dataset identifier of which a scheduled workflow is to be retrieved - * @return the scheduled workflow - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if user is unauthorized to access the scheduled - * workflow
  • - *
  • {@link NoDatasetFoundException} if dataset identifier does not exist
  • - *
- */ - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ScheduledWorkflow getScheduledWorkflow( - @RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - ScheduledWorkflow scheduledWorkflow = scheduleWorkflowService - .getScheduledWorkflowByDatasetId(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("ScheduledWorkflow with with datasetId '{}' found", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - return scheduledWorkflow; - } - - @GetMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.OK) - public ResponseListWrapper getAllScheduledWorkflows( - @RequestHeader("Authorization") String authorization, - @RequestParam(value = "nextPage", required = false, defaultValue = "0") int nextPage) - throws GenericMetisException { - - if (nextPage < 0) { - throw new BadContentException(CommonStringValues.NEXT_PAGE_CANNOT_BE_NEGATIVE); - } - ResponseListWrapper responseListWrapper = new ResponseListWrapper<>(); - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - responseListWrapper.setResultsAndLastPage(scheduleWorkflowService - .getAllScheduledWorkflows(metisUserView, ScheduleFrequence.NULL, nextPage), - scheduleWorkflowService.getScheduledWorkflowsPerRequest(), nextPage); - LOGGER.info("Batch of: {} scheduledWorkflows returned, using batch nextPage: {}", - responseListWrapper.getListSize(), nextPage); - return responseListWrapper; - } - - @PutMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE, produces = { - 
MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void updateScheduledWorkflow(@RequestHeader("Authorization") String authorization, - @RequestBody ScheduledWorkflow scheduledWorkflow) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - scheduleWorkflowService.updateScheduledWorkflow(metisUserView, scheduledWorkflow); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("ScheduledWorkflow with with datasetId '{}' updated", - CRLF_PATTERN.matcher(scheduledWorkflow.getDatasetId()).replaceAll("")); - } - } - - @DeleteMapping(value = RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, produces = { - MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) - @ResponseStatus(HttpStatus.NO_CONTENT) - public void deleteScheduledWorkflowExecution(@RequestHeader("Authorization") String authorization, - @PathVariable("datasetId") String datasetId) throws GenericMetisException { - MetisUserView metisUserView = authenticationClient.getUserByAccessTokenInHeader(authorization); - scheduleWorkflowService.deleteScheduledWorkflow(metisUserView, datasetId); - if (LOGGER.isInfoEnabled()) { - LOGGER.info("ScheduledWorkflowExecution for datasetId '{}' deleted", - datasetId.replaceAll(CommonStringValues.REPLACEABLE_CRLF_CHARACTERS_REGEX, "")); - } - } -} diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/exception/RestResponseExceptionHandler.java b/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/exception/RestResponseExceptionHandler.java deleted file mode 100644 index 0a9046ecad..0000000000 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/exception/RestResponseExceptionHandler.java +++ /dev/null @@ -1,149 +0,0 @@ -package eu.europeana.metis.core.rest.exception; - -import eu.europeana.metis.exception.GenericMetisException; -import 
eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.exception.StructuredExceptionWrapper; -import jakarta.servlet.http.HttpServletResponse; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.http.HttpStatus; -import org.springframework.http.converter.HttpMessageNotReadableException; -import org.springframework.web.HttpRequestMethodNotSupportedException; -import org.springframework.web.bind.MissingRequestHeaderException; -import org.springframework.web.bind.MissingServletRequestParameterException; -import org.springframework.web.bind.annotation.ControllerAdvice; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException; - -/** - * {@link ControllerAdvice} class that handles exceptions through spring. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-10 - */ -@ControllerAdvice -public class RestResponseExceptionHandler { - - private static final String AUTHORIZATION_HEADER = "Authorization"; - - /** - * Handle metis {@link GenericMetisException} which is one of the many metis exceptions. - *

Some examples e.g. {@link NoDatasetFoundException}, {@link NoWorkflowFoundException}...

- * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(value = {GenericMetisException.class}) - @ResponseBody - public StructuredExceptionWrapper handleException(Exception exception, - HttpServletResponse response) { - final ResponseStatus annotationResponseStatus = AnnotationUtils - .findAnnotation(exception.getClass(), ResponseStatus.class); - HttpStatus status = annotationResponseStatus == null ? HttpStatus.INTERNAL_SERVER_ERROR - : annotationResponseStatus.value(); - response.setStatus(status.value()); - return new StructuredExceptionWrapper(exception.getMessage()); - } - - /** - * Handler for specific classes to overwrite behaviour - * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(HttpMessageNotReadableException.class) - @ResponseBody - public StructuredExceptionWrapper handleMessageNotReadable( - HttpMessageNotReadableException exception, - HttpServletResponse response) { - response.setStatus(HttpStatus.NOT_ACCEPTABLE.value()); - return new StructuredExceptionWrapper( - "Message body not readable. 
It is missing or malformed\n" + exception.getMessage()); - } - - /** - * Handler for specific classes to overwrite behaviour - * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(MissingServletRequestParameterException.class) - @ResponseBody - public StructuredExceptionWrapper handleMissingParams( - MissingServletRequestParameterException exception, - HttpServletResponse response) { - response.setStatus(HttpStatus.NOT_ACCEPTABLE.value()); - return new StructuredExceptionWrapper(exception.getParameterName() + " parameter is missing"); - } - - /** - * Handler for specific classes to overwrite behaviour - * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(HttpRequestMethodNotSupportedException.class) - @ResponseBody - public StructuredExceptionWrapper handleMissingParams( - HttpRequestMethodNotSupportedException exception, - HttpServletResponse response) { - response.setStatus(HttpStatus.METHOD_NOT_ALLOWED.value()); - return new StructuredExceptionWrapper("Method not allowed: " + exception.getMessage()); - } - - /** - * Handler for specific classes to overwrite behaviour - * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(value = {IllegalStateException.class, - MethodArgumentTypeMismatchException.class}) - @ResponseBody - public StructuredExceptionWrapper handleMessageNotReadable(Exception exception, - HttpServletResponse response) { - response.setStatus(HttpStatus.NOT_ACCEPTABLE.value()); - 
return new StructuredExceptionWrapper( - "Request not readable.\n" + exception.getMessage()); - } - - /** - * Handler for specific classes to overwrite behaviour - * - * @param exception the exception thrown - * @param response the response that should be updated - * @return {@link StructuredExceptionWrapper} a json friendly class that contains the error - * message for the client - */ - @ExceptionHandler(value = MissingRequestHeaderException.class) - @ResponseBody - public StructuredExceptionWrapper handleMissingRequestHeaderException( - MissingRequestHeaderException exception, - HttpServletResponse response) { - - final StructuredExceptionWrapper output; - - if (AUTHORIZATION_HEADER.equalsIgnoreCase(exception.getHeaderName())) { - response.setStatus(HttpStatus.UNAUTHORIZED.value()); - output = new StructuredExceptionWrapper( - "Authorization header is missing in the request."); - } else { - output = new StructuredExceptionWrapper(exception.getMessage()); - response.setStatus(HttpStatus.BAD_REQUEST.value()); - } - - return output; - } -} diff --git a/metis-core/metis-core-rest/src/main/resources/application.properties.example b/metis-core/metis-core-rest/src/main/resources/application.properties.example deleted file mode 100644 index fa40cd4634..0000000000 --- a/metis-core/metis-core-rest/src/main/resources/application.properties.example +++ /dev/null @@ -1,113 +0,0 @@ -#Spring -logging.config=/data/logging/log4j2.xml -#logging.config=log4j2.xml -server.error.whitelabel.enabled=false -spring.servlet.multipart.max-file-size=5MB -spring.servlet.multipart.max-request-size=5MB -spring.autoconfigure.exclude=\ - org.springframework.boot.autoconfigure.mongo.embedded.EmbeddedMongoAutoConfiguration, \ - org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration, \ - org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration - -#Truststore -truststore.path= -truststore.password= - -#Orchestration -metis-core.maxConcurrentThreads=1 
-metis-core.dpsMonitorCheckIntervalInSeconds=5 -metis-core.dpsConnectTimeoutInMilliseconds=10000 -metis-core.dpsReadTimeoutInMilliseconds=30000 -metis-core.failsafeMarginOfInactivityInSeconds=5 -metis-core.periodicFailsafeCheckInMilliseconds=60000 -metis-core.periodicSchedulerCheckInMilliseconds=90000 -metis-core.pollingTimeoutForCleaningCompletionServiceInMilliseconds=10000 -#If a task passed this cap the task will be cancelled -metis-core.periodOfNoProcessedRecordsChangeInMinutes=30 -metis-core.threadLimitThrottlingLevelWeak=16 -metis-core.threadLimitThrottlingLevelMedium=8 -metis-core.threadLimitThrottlingLevelStrong=4 -#Use this to specify the FQDN where the application will be hosted under -metis-core.baseUrl=https://metis-core-rest.test.eanadev.org -#Use this to specify the maximum execution list length that is served by -#Metis Core (regardless on whether the list is paginated). -metis-core.maxServedExecutionListLength=200 -metis-core.maxDepublishRecordIdsPerDataset=1000 -#Use this to specify the default sampling size for Link Checking -metis-core.linkCheckingDefaultSamplingSize=1000 -#Solr -metis-core.solrCommitPeriodInMinutes=15 -# Authentication -metis-core.authenticationBaseUrl= -# CORS -metis-core.allowedCorsHosts= - -#RabbitMq -rabbitmq.host= -rabbitmq.port= -rabbitmq.username= -rabbitmq.password= -rabbitmq.virtualHost=/ -rabbitmq.queueName=UserWorkflowExecution -rabbitmq.highestPriority=10 -rabbitmq.enableSsl= -rabbitmq.enableCustomTruststore= - -#Mongo -mongo.hosts= -mongo.ports= -mongo.authenticationDatabase= -mongo.username= -mongo.password= -mongo.enableSsl= -mongo.database=metis-core -mongo.applicationName=metis-core-local - -#Redis -redis.host= -redis.port= -redis.username= -redis.password= -redis.enableSsl= -redis.enableCustomTruststore= -redis.redisson.connectionPoolSize=16 -redis.redisson.connectTimeoutInSeconds=60 -redis.redisson.lockWatchdogTimeoutInSeconds=120 -#Setting to -1 disables DNS monitoring 
-redis.redisson.dnsMonitorIntervalInSeconds=60 -redis.redisson.idleConnectionTimeoutInSeconds=60 -redis.redisson.retryAttempts=10 - -# ECloud -ecloud.baseUrl= -ecloud.dpsBaseUrl= -ecloud.provider= -ecloud.username= -ecloud.password= - -#Validation parameters -validation.validationExternalSchemaZip= -validation.validationExternalSchemaRoot= -validation.validationExternalSchematronRoot= -validation.validationInternalSchemaZip= -validation.validationInternalSchemaRoot= -validation.validationInternalSchematronRoot= - -#Actuator -management.endpoint.health.probes.enabled=true -management.health.livenessState.enabled=true -management.health.readinessState.enabled=true - -#Elastic APM -elastic.apm.enabled=true -elastic.apm.recording=true -elastic.apm.instrument=true -elastic.apm.service_name=metis-core-local -elastic.apm.server_url= -elastic.apm.environment=local -elastic.apm.application_packages=eu.europeana -elastic.apm.log_level=ERROR -elastic.apm.capture_body=all -elastic.apm.capture_headers=true -elastic.apm.metrics_interval=5s - diff --git a/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt b/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt deleted file mode 100644 index 89f805227b..0000000000 --- a/metis-core/metis-core-rest/src/main/resources/default_transformation.xslt +++ /dev/null @@ -1,462 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - false - - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestDatasetController.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestDatasetController.java deleted file mode 100644 index bdc23e5c18..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestDatasetController.java +++ /dev/null @@ -1,1146 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.core.Is.is; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; -import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -import com.jayway.jsonpath.Configuration; -import com.jayway.jsonpath.JsonPath; -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetSearchView; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.dataset.DatasetXsltStringWrapper; -import eu.europeana.metis.core.exceptions.DatasetAlreadyExistsException; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoXsltFoundException; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.exception.RestResponseExceptionHandler; -import eu.europeana.metis.core.rest.utils.TestObjectFactory; -import eu.europeana.metis.core.rest.utils.TestUtils; -import eu.europeana.metis.core.service.DatasetService; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.springframework.http.MediaType; -import org.springframework.http.converter.StringHttpMessageConverter; -import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter; -import 
org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; - -class TestDatasetController { - - private static DatasetService datasetServiceMock; - private static AuthenticationClient authenticationClient; - private static MockMvc datasetControllerMock; - - @BeforeAll - static void setUp() { - datasetServiceMock = mock(DatasetService.class); - authenticationClient = mock(AuthenticationClient.class); - DatasetController datasetController = new DatasetController(datasetServiceMock, - authenticationClient); - datasetControllerMock = MockMvcBuilders - .standaloneSetup(datasetController) - .setControllerAdvice(new RestResponseExceptionHandler()) - .setMessageConverters(new MappingJackson2HttpMessageConverter(), - new MappingJackson2XmlHttpMessageConverter(), - new StringHttpMessageConverter(StandardCharsets.UTF_8)) - .build(); - } - - @AfterEach - void cleanUp() { - reset(datasetServiceMock); - reset(authenticationClient); - } - - private static MetisUserView getUserWithAccountRole(AccountRole accountRole) { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - doReturn(accountRole).when(metisUserView).getAccountRole(); - return metisUserView; - } - - @Test - void createDataset() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.createDataset(any(MetisUserView.class), any(Dataset.class))) - .thenReturn(dataset); - - datasetControllerMock.perform(post("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .accept(MediaType.APPLICATION_JSON) - 
.content(TestUtils.convertObjectToJsonBytes(dataset))) - .andExpect(status().is(201)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.datasetName", is(TestObjectFactory.DATASETNAME))); - verify(datasetServiceMock, times(1)).createDataset(any(MetisUserView.class), any(Dataset.class)); - } - - @Test - void createDatasetInvalidUser() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(post("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .accept(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(dataset))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - verify(datasetServiceMock, times(0)).createDataset(any(MetisUserView.class), any(Dataset.class)); - } - - @Test - void createDataset_DatasetAlreadyExistsException_Returns409() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new DatasetAlreadyExistsException("Conflict")) - .when(datasetServiceMock).createDataset(any(MetisUserView.class), any(Dataset.class)); - - datasetControllerMock.perform(post("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(dataset))) - 
.andExpect(status().is(409)) - .andExpect(jsonPath("$.errorMessage", is("Conflict"))); - } - - @Test - void updateDataset_withValidData_Returns204() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXsltStringWrapper datasetXsltStringWrapper = new DatasetXsltStringWrapper(dataset, - ""); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - datasetControllerMock.perform(put("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(datasetXsltStringWrapper))) - .andExpect(status().is(204)) - .andExpect(content().string("")); - - verify(datasetServiceMock, times(1)) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - } - - @Test - void updateDataset_InvalidUser() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - datasetControllerMock.perform(put("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(dataset))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - } - - @Test - void updateDataset_noDatasetFound_Returns404() throws Exception { - MetisUserView metisUserView = 
getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXsltStringWrapper datasetXsltStringWrapper = new DatasetXsltStringWrapper(dataset, - ""); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new NoDatasetFoundException("Does not exist")).when(datasetServiceMock) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - datasetControllerMock.perform(put("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(datasetXsltStringWrapper))) - .andExpect(status().is(404)) - .andExpect(jsonPath("$.errorMessage", is("Does not exist"))); - - verify(datasetServiceMock, times(1)) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - } - - @Test - void updateDataset_BadContentException_Returns406() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXsltStringWrapper datasetXsltStringWrapper = new DatasetXsltStringWrapper(dataset, - ""); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new BadContentException("Bad Content")).when(datasetServiceMock) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - datasetControllerMock.perform(put("/datasets") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(datasetXsltStringWrapper))) - .andExpect(status().is(406)) - .andExpect(jsonPath("$.errorMessage", 
is("Bad Content"))); - - verify(datasetServiceMock, times(1)) - .updateDataset(any(MetisUserView.class), any(Dataset.class), anyString()); - } - - @Test - void deleteDataset() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - datasetControllerMock.perform( - delete(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(204)) - .andExpect(content().string("")); - - ArgumentCaptor datasetIdArgumentCaptor = ArgumentCaptor.forClass(String.class); - - verify(datasetServiceMock, times(1)) - .deleteDatasetByDatasetId(any(MetisUserView.class), datasetIdArgumentCaptor.capture()); - - assertEquals(Integer.toString(TestObjectFactory.DATASETID), datasetIdArgumentCaptor.getValue()); - } - - @Test - void deleteDatasetInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - datasetControllerMock.perform( - delete(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - verify(datasetServiceMock, times(0)) - .deleteDatasetByDatasetId(any(MetisUserView.class), anyString()); - } - - @Test - void deleteDataset_BadContentException_Returns406() throws Exception { - MetisUserView metisUserView = 
getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new BadContentException("Bad Content")).when(datasetServiceMock) - .deleteDatasetByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID)); - datasetControllerMock.perform( - delete(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .accept(MediaType.APPLICATION_JSON) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)) - .andExpect(jsonPath("$.errorMessage", is("Bad Content"))); - } - - - @Test - void getByDatasetId() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getDatasetByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn(dataset); - datasetControllerMock - .perform(get(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.datasetName", is(TestObjectFactory.DATASETNAME))) - .andExpect(jsonPath("$.datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))); - - ArgumentCaptor datasetIdArgumentCaptor = ArgumentCaptor.forClass(String.class); - verify(datasetServiceMock, times(1)) - .getDatasetByDatasetId(any(MetisUserView.class), datasetIdArgumentCaptor.capture()); - 
assertEquals(Integer.toString(TestObjectFactory.DATASETID), datasetIdArgumentCaptor.getValue()); - } - - @Test - void getByDatasetIdInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - datasetControllerMock - .perform(get(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getDatasetByDatasetId(any(MetisUserView.class), anyString()); - } - - @Test - void getByDatasetId_noDatasetFound_Returns404() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getDatasetByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenThrow(new NoDatasetFoundException("Does not exist")); - datasetControllerMock - .perform(get(String.format("/datasets/%s", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(status().is(404)) - .andExpect(jsonPath("$.errorMessage", is("Does not exist"))); - } - - @Test - void getDatasetXsltByDatasetId() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = 
TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getDatasetXsltByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn(xsltObject); - datasetControllerMock - .perform( - get(String.format("/datasets/%s/xslt", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.xslt", is(xsltObject.getXslt()))) - .andExpect(jsonPath("$.datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))); - - ArgumentCaptor datasetIdArgumentCaptor = ArgumentCaptor.forClass(String.class); - verify(datasetServiceMock, times(1)) - .getDatasetXsltByDatasetId(any(MetisUserView.class), datasetIdArgumentCaptor.capture()); - assertEquals(Integer.toString(TestObjectFactory.DATASETID), datasetIdArgumentCaptor.getValue()); - } - - @Test - void getDatasetXsltByDatasetId_noDatasetFound_Returns404() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getDatasetXsltByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenThrow(new NoDatasetFoundException("Does not exist")); - datasetControllerMock - .perform( - get(String.format("/datasets/%s/xslt", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - 
.content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(404)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is("Does not exist"))); - } - - @Test - void getDatasetXsltByDatasetId_noXsltFound_Returns404() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getDatasetXsltByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenThrow(new NoXsltFoundException("Does not exist")); - datasetControllerMock - .perform( - get(String.format("/datasets/%s/xslt", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(404)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is("Does not exist"))); - } - - @Test - void getDatasetXsltByDatasetIdInvalidUser() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - when(datasetServiceMock - .getDatasetXsltByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn(xsltObject); - datasetControllerMock - .perform( - get(String.format("/datasets/%s/xslt", TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) 
- .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getDatasetXsltByDatasetId(any(MetisUserView.class), anyString()); - } - - @Test - void getXsltByXsltId() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - - when(datasetServiceMock.getDatasetXsltByXsltId(TestObjectFactory.XSLTID)) - .thenReturn(xsltObject); - datasetControllerMock - .perform(get(String.format("/datasets/xslt/%s", TestObjectFactory.XSLTID)) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType( - new MediaType(MediaType.TEXT_PLAIN.getType(), MediaType.TEXT_PLAIN.getSubtype(), - StandardCharsets.UTF_8))) - .andExpect(content().string(xsltObject.getXslt())); - - ArgumentCaptor xsltIdArgumentCaptor = ArgumentCaptor.forClass(String.class); - verify(datasetServiceMock, times(1)) - .getDatasetXsltByXsltId(xsltIdArgumentCaptor.capture()); - assertEquals(TestObjectFactory.XSLTID, xsltIdArgumentCaptor.getValue()); - } - - @Test - void getXsltByXsltId_NoXsltFound_404() throws Exception { - when(datasetServiceMock.getDatasetXsltByXsltId(TestObjectFactory.XSLTID)) - .thenThrow(new NoXsltFoundException("No xslt found")); - datasetControllerMock - .perform(get(String.format("/datasets/xslt/%s", TestObjectFactory.XSLTID)) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(404)); - - ArgumentCaptor xsltIdArgumentCaptor = ArgumentCaptor.forClass(String.class); - verify(datasetServiceMock, times(1)) - .getDatasetXsltByXsltId(xsltIdArgumentCaptor.capture()); - assertEquals(TestObjectFactory.XSLTID, xsltIdArgumentCaptor.getValue()); - } - - 
@Test - void createDefaultXslt() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.METIS_ADMIN); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - xsltObject.setId(new ObjectId(TestObjectFactory.XSLTID)); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - when(datasetServiceMock.createDefaultXslt(any(MetisUserView.class), anyString())) - .thenReturn(xsltObject); - datasetControllerMock - .perform(post("/datasets/xslt/default", TestObjectFactory.XSLTID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.TEXT_PLAIN) - .content(TestUtils.convertObjectToJsonBytes(xsltObject.getXslt()))) - .andExpect(status().is(201)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.xslt", is(xsltObject.getXslt()))) - .andExpect(jsonPath("$.datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))); - } - - @Test - void createDefaultXslt_Unauthorized() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - xsltObject.setId(new ObjectId(TestObjectFactory.XSLTID)); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - when(datasetServiceMock.createDefaultXslt(any(MetisUserView.class), anyString())) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - datasetControllerMock - .perform(post("/datasets/xslt/default", TestObjectFactory.XSLTID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.TEXT_PLAIN) - 
.content(TestUtils.convertObjectToJsonBytes(xsltObject.getXslt()))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void getLatestDefaultXslt() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt xsltObject = new DatasetXslt(dataset.getDatasetId(), - ""); - - when(datasetServiceMock.getLatestDefaultXslt()).thenReturn(xsltObject); - datasetControllerMock.perform(get("/datasets/xslt/default") - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType( - new MediaType(MediaType.TEXT_PLAIN.getType(), MediaType.TEXT_PLAIN.getSubtype(), - StandardCharsets.UTF_8))) - .andExpect(content().string(xsltObject.getXslt())); - - verify(datasetServiceMock, times(1)).getLatestDefaultXslt(); - } - - @Test - void getLatestDefaultXslt_NoXsltFound_404() throws Exception { - when(datasetServiceMock.getLatestDefaultXslt()) - .thenThrow(new NoXsltFoundException("No xslt found")); - datasetControllerMock.perform(get("/datasets/xslt/default") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(404)); - - verify(datasetServiceMock, times(1)).getLatestDefaultXslt(); - } - - @Test - void transformRecordsUsingLatestDatasetXslt() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - List listOfRecords = TestObjectFactory.createListOfRecords(5); - when(datasetServiceMock - .transformRecordsUsingLatestDatasetXslt(any(MetisUserView.class), anyString(), anyList())) - .thenReturn(listOfRecords); - datasetControllerMock - .perform(post("/datasets/{datasetId}/xslt/transform", - 
Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON_VALUE) - .content(TestUtils.convertObjectToJsonBytes(listOfRecords))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$", hasSize(5))); - } - - @Test - void transformRecordsUsingLatestDatasetXslt_UserUnauthorizedException() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.WRONG_ACCESS_TOKEN)); - datasetControllerMock - .perform(post("/datasets/{datasetId}/xslt/transform", - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON_VALUE) - .content(TestUtils.convertObjectToJsonBytes(TestObjectFactory.createListOfRecords(5)))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.WRONG_ACCESS_TOKEN))); - } - - @Test - void transformRecordsUsingLatestDefaultXslt() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - List listOfRecords = TestObjectFactory.createListOfRecords(5); - when(datasetServiceMock - .transformRecordsUsingLatestDefaultXslt(any(MetisUserView.class), anyString(), anyList())) - .thenReturn(listOfRecords); - datasetControllerMock - .perform(post("/datasets/{datasetId}/xslt/transform/default", - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON_VALUE) - .content(TestUtils.convertObjectToJsonBytes(listOfRecords))) - 
.andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$", hasSize(5))); - } - - @Test - void transformRecordsUsingLatestDefaultXslt_UserUnauthorizedException() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.WRONG_ACCESS_TOKEN)); - datasetControllerMock - .perform(post("/datasets/{datasetId}/xslt/transform/default", - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON_VALUE) - .content(TestUtils.convertObjectToJsonBytes(TestObjectFactory.createListOfRecords(5)))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.WRONG_ACCESS_TOKEN))); - } - - @Test - void getByDatasetName() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getDatasetByDatasetName(metisUserView, TestObjectFactory.DATASETNAME)) - .thenReturn(dataset); - datasetControllerMock - .perform(get(String.format("/datasets/dataset_name/%s", TestObjectFactory.DATASETNAME)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.datasetName", is(TestObjectFactory.DATASETNAME))) - .andExpect(jsonPath("$.datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))); - - ArgumentCaptor 
datasetNameArgumentCaptor = ArgumentCaptor.forClass(String.class); - verify(datasetServiceMock, times(1)) - .getDatasetByDatasetName(any(MetisUserView.class), datasetNameArgumentCaptor.capture()); - assertEquals(TestObjectFactory.DATASETNAME, datasetNameArgumentCaptor.getValue()); - } - - @Test - void getByDatasetNameInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - datasetControllerMock - .perform(get(String.format("/datasets/dataset_name/%s", TestObjectFactory.DATASETNAME)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getDatasetByDatasetName(any(MetisUserView.class), anyString()); - } - - - @Test - void getByDatasetName_noDatasetFound_Returns404() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getDatasetByDatasetName(metisUserView, TestObjectFactory.DATASETNAME)) - .thenThrow(new NoDatasetFoundException("Does not exist")); - datasetControllerMock - .perform(get(String.format("/datasets/dataset_name/%s", TestObjectFactory.DATASETNAME)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(status().is(404)) - .andExpect(jsonPath("$.errorMessage", is("Does not exist"))); - } - - 
@Test - void getAllDatasetsByProvider() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - List datasetList = getDatasets(); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getAllDatasetsByProvider(metisUserView, "myProvider", 3)) - .thenReturn(datasetList); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/provider/myProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor provider = ArgumentCaptor.forClass(String.class); - ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .getAllDatasetsByProvider(any(MetisUserView.class), provider.capture(), page.capture()); - - assertEquals("myProvider", provider.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - @Test - void getAllDatasetsByProviderNegativeNextPage() throws Exception { - datasetControllerMock.perform(get("/datasets/provider/myProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)); - } - - @Test - void getAllDatasetsByProviderInvalidUser() throws Exception { - 
when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/provider/myProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getAllDatasetsByProvider(any(MetisUserView.class), anyString(), anyInt()); - } - - @Test - void getAllDatasetsByIntermediateProvider() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - List datasetList = getDatasets(); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock - .getAllDatasetsByIntermediateProvider(metisUserView, "myIntermediateProvider", 3)) - .thenReturn(datasetList); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/intermediate_provider/myIntermediateProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor provider = ArgumentCaptor.forClass(String.class); - 
ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .getAllDatasetsByIntermediateProvider(any(MetisUserView.class), provider.capture(), - page.capture()); - - assertEquals("myIntermediateProvider", provider.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - @Test - void getAllDatasetsByIntermediateProviderNegativeNextPage() throws Exception { - datasetControllerMock.perform(get("/datasets/intermediate_provider/myIntermediateProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)); - } - - @Test - void getAllDatasetsByIntermediateProviderInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/intermediate_provider/myIntermediateProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getAllDatasetsByIntermediateProvider(any(MetisUserView.class), anyString(), anyInt()); - } - - @Test - void getAllDatasetsByDataProvider() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - List datasetList = getDatasets(); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getAllDatasetsByDataProvider(metisUserView, 
"myDataProvider", 3)) - .thenReturn(datasetList); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/data_provider/myDataProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor provider = ArgumentCaptor.forClass(String.class); - ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .getAllDatasetsByDataProvider(any(MetisUserView.class), provider.capture(), page.capture()); - - assertEquals("myDataProvider", provider.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - @Test - void getAllDatasetsByDataProviderNegativeNextPage() throws Exception { - datasetControllerMock.perform(get("/datasets/data_provider/myDataProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)); - } - - @Test - void getAllDatasetsByDataProviderInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/data_provider/myDataProvider") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - 
.content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getAllDatasetsByDataProvider(any(MetisUserView.class), anyString(), anyInt()); - } - - @Test - void getAllDatasetsByOrganizationId() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - List datasetList = getDatasets(); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getAllDatasetsByOrganizationId(metisUserView, "myOrganizationId", 3)) - .thenReturn(datasetList); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/organization_id/myOrganizationId") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor provider = ArgumentCaptor.forClass(String.class); - ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .getAllDatasetsByOrganizationId(any(MetisUserView.class), provider.capture(), page.capture()); - - assertEquals("myOrganizationId", provider.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - @Test - void getAllDatasetsByOrganizationIdNegativeNextPage() throws Exception { - 
datasetControllerMock.perform(get("/datasets/organization_id/myOrganizationId") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)); - } - - @Test - void getAllDatasetsByOrganizationIdInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/organization_id/myOrganizationId") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getAllDatasetsByOrganizationId(any(MetisUserView.class), anyString(), anyInt()); - } - - @Test - void getAllDatasetsByOrganizationName() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - List datasetList = getDatasets(); - - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(datasetServiceMock.getAllDatasetsByOrganizationName(metisUserView, "myOrganizationName", 3)) - .thenReturn(datasetList); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/organization_name/myOrganizationName") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - 
.andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor provider = ArgumentCaptor.forClass(String.class); - ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .getAllDatasetsByOrganizationName(any(MetisUserView.class), provider.capture(), page.capture()); - - assertEquals("myOrganizationName", provider.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - @Test - void getAllDatasetsByOrganizationNameNegativeNextPage() throws Exception { - datasetControllerMock.perform(get("/datasets/organization_name/myOrganizationName") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(406)); - } - - @Test - void getAllDatasetsByOrganizationNameInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/organization_name/myOrganizationName") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(datasetServiceMock, times(0)) - .getAllDatasetsByOrganizationName(any(MetisUserView.class), anyString(), anyInt()); - } - - @Test - void getDatasetsCountries() throws 
Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - MvcResult mvcResult = datasetControllerMock.perform(get("/datasets/countries") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)).andReturn(); - - String resultListOfCountries = mvcResult.getResponse().getContentAsString(); - Object document = Configuration.defaultConfiguration().jsonProvider() - .parse(resultListOfCountries); - - List> mapListOfCountries = JsonPath.read(document, "$[*]"); - assertEquals(Country.values().length, mapListOfCountries.size()); - assertEquals(mapListOfCountries.get(22).get("enum"), Country.values()[22].name()); - assertEquals(mapListOfCountries.get(22).get("name"), Country.values()[22].getName()); - assertEquals(mapListOfCountries.get(22).get("isoCode"), Country.values()[22].getIsoCode()); - } - - @Test - void getDatasetsCountriesInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/countries") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void getDatasetsLanguages() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when( - 
authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - MvcResult mvcResult = datasetControllerMock.perform(get("/datasets/languages") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)).andReturn(); - - String resultListOfLanguages = mvcResult.getResponse().getContentAsString(); - Object document = Configuration.defaultConfiguration().jsonProvider() - .parse(resultListOfLanguages); - - List> mapListOfLanguages = JsonPath.read(document, "$[*]"); - assertEquals(Language.values().length, mapListOfLanguages.size()); - assertEquals(mapListOfLanguages.get(10).get("enum"), - Language.getLanguageListSortedByName().get(10).name()); - assertEquals(mapListOfLanguages.get(10).get("name"), - Language.getLanguageListSortedByName().get(10).getName()); - } - - @Test - void getDatasetsLanguagesInvalidUser() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - - datasetControllerMock.perform(get("/datasets/languages") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void getDatasetSearch() throws Exception { - MetisUserView metisUserView = getUserWithAccountRole(AccountRole.EUROPEANA_DATA_OFFICER); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - when(datasetServiceMock.searchDatasetsBasedOnSearchString(metisUserView, "test", 3)) - 
.thenReturn(getDatasetSearchViews()); - when(datasetServiceMock.getDatasetsPerRequestLimit()).thenReturn(5); - - datasetControllerMock.perform(get("/datasets/search") - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("searchString", "test") - .param("nextPage", "3") - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(null))) - .andExpect(status().is(200)) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andExpect(jsonPath("$.results", hasSize(2))) - .andExpect(jsonPath("$.results[0].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 2)))); - - ArgumentCaptor searchString = ArgumentCaptor.forClass(String.class); - ArgumentCaptor page = ArgumentCaptor.forClass(Integer.class); - verify(datasetServiceMock, times(1)) - .searchDatasetsBasedOnSearchString(any(MetisUserView.class), searchString.capture(), page.capture()); - - assertEquals("test", searchString.getValue()); - assertEquals(3, page.getValue().intValue()); - } - - private List getDatasetSearchViews() { - List datasetSearchViews = new ArrayList<>(2); - final DatasetSearchView datasetSearchView1 = new DatasetSearchView(); - datasetSearchView1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetSearchView1.setDatasetName(TestObjectFactory.DATASETNAME + 1); - datasetSearchView1.setProvider("provider1"); - datasetSearchView1.setDataProvider("dataProvider1"); - datasetSearchView1.setLastExecutionDate(new Date()); - datasetSearchViews.add(datasetSearchView1); - - final DatasetSearchView datasetSearchView2 = new DatasetSearchView(); - datasetSearchView2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetSearchView2.setDatasetName(TestObjectFactory.DATASETNAME + 2); - datasetSearchView2.setProvider("provider2"); - datasetSearchView2.setDataProvider("dataProvider2"); - 
datasetSearchView2.setLastExecutionDate(new Date()); - datasetSearchViews.add(datasetSearchView2); - - return datasetSearchViews; - } - - private List getDatasets() { - List datasetList = new ArrayList<>(); - Dataset dataset1 = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetList.add(dataset1); - - Dataset dataset2 = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetList.add(dataset2); - - return datasetList; - } - - -} \ No newline at end of file diff --git a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestOrchestratorController.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestOrchestratorController.java deleted file mode 100644 index 45aa17daa9..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestOrchestratorController.java +++ /dev/null @@ -1,976 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static com.jayway.jsonassert.impl.matcher.IsCollectionWithSize.hasSize; -import static org.hamcrest.core.Is.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; -import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.WorkflowAlreadyExistsException; -import eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException; -import eu.europeana.metis.core.rest.ExecutionHistory; -import eu.europeana.metis.core.rest.ExecutionHistory.Execution; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability.PluginWithDataAvailability; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.rest.VersionEvolution; -import eu.europeana.metis.core.rest.VersionEvolution.VersionEvolutionStep; -import eu.europeana.metis.core.rest.exception.RestResponseExceptionHandler; -import eu.europeana.metis.core.rest.execution.details.WorkflowExecutionView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionAndDatasetView; -import eu.europeana.metis.core.rest.utils.TestObjectFactory; -import eu.europeana.metis.core.rest.utils.TestUtils; -import 
eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.nio.charset.StandardCharsets; -import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.TimeZone; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentMatchers; -import org.mockito.Mockito; -import org.springframework.http.MediaType; -import org.springframework.http.converter.StringHttpMessageConverter; -import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-06 - */ -class TestOrchestratorController { - - private static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat( - "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); - - static { - simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - } - - private static OrchestratorService orchestratorService; - private static MockMvc orchestratorControllerMock; - private static 
AuthenticationClient authenticationClient; - - @BeforeAll - static void setUp() { - orchestratorService = mock(OrchestratorService.class); - authenticationClient = mock(AuthenticationClient.class); - OrchestratorController orchestratorController = - new OrchestratorController(orchestratorService, authenticationClient); - orchestratorControllerMock = MockMvcBuilders - .standaloneSetup(orchestratorController) - .setControllerAdvice(new RestResponseExceptionHandler()) - .setMessageConverters(new MappingJackson2HttpMessageConverter(), - new MappingJackson2XmlHttpMessageConverter(), - new StringHttpMessageConverter(StandardCharsets.UTF_8)) - .build(); - } - - @AfterEach - void cleanUp() { - Mockito.reset(orchestratorService, authenticationClient); - } - - @Test - void createWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - orchestratorControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(201)) - .andExpect(content().string("")); - - verify(orchestratorService, times(1)) - .createWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - } - - @Test - void createWorkflow_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - orchestratorControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - 
Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - - verify(orchestratorService, never()) - .createWorkflow(any(), anyString(), any(Workflow.class), any()); - } - - @Test - void createWorkflow_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(orchestratorService).createWorkflow(eq(metisUserView), any(), any(), isNull()); - orchestratorControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void createWorkflow_WorkflowAlreadyExistsException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - doThrow(new WorkflowAlreadyExistsException("Some error")).when(orchestratorService) - .createWorkflow(any(MetisUserView.class), anyString(), any(Workflow.class), any()); - orchestratorControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, 
- Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(409)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - - verify(orchestratorService, times(1)) - .createWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - } - - @Test - void updateWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - orchestratorControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(204)) - .andExpect(content().string("")); - - verify(orchestratorService, times(1)) - .updateWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - } - - @Test - void updateWorkflow_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - orchestratorControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", 
is(CommonStringValues.UNAUTHORIZED))); - - verify(orchestratorService, never()) - .updateWorkflow(any(), anyString(), any(Workflow.class), any()); - } - - @Test - void updateWorkflow_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(orchestratorService) - .updateWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - orchestratorControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void updateWorkflow_NoWorkflowFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - doThrow(new NoWorkflowFoundException("Some error")).when(orchestratorService) - .updateWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - orchestratorControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(workflow))) - .andExpect(status().is(404)) - 
.andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - - verify(orchestratorService, times(1)) - .updateWorkflow(eq(metisUserView), anyString(), any(Workflow.class), isNull()); - } - - @Test - void deleteWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - orchestratorControllerMock.perform(delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(204)) - .andExpect(content().string("")); - verify(orchestratorService, times(1)).deleteWorkflow(eq(metisUserView), anyString()); - } - - @Test - void deleteWorkflow_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - orchestratorControllerMock.perform(delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - verify(orchestratorService, never()).deleteWorkflow(any(), anyString()); - } - - @Test - void deleteWorkflow_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - 
.when(orchestratorService).deleteWorkflow(eq(metisUserView), any()); - orchestratorControllerMock.perform(delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void getWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(orchestratorService.getWorkflow(eq(metisUserView), anyString())).thenReturn(workflow); - orchestratorControllerMock.perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.datasetId", is(workflow.getDatasetId()))); - - verify(orchestratorService, times(1)).getWorkflow(eq(metisUserView), anyString()); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(eq(metisUserView), anyString(), isNull(), isNull(), - anyInt())) - .thenReturn(workflowExecution); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - 
Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(201)) - .andExpect(jsonPath("$.workflowStatus", is(WorkflowStatus.INQUEUE.name()))); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(eq(metisUserView), anyString(), isNull(), isNull(), - anyInt())) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_WorkflowExecutionAlreadyExistsException() - throws Exception { - MetisUserView 
metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new WorkflowExecutionAlreadyExistsException("Some error")).when(orchestratorService) - .addWorkflowInQueueOfWorkflowExecutions(eq(metisUserView), anyString(), isNull(), isNull(), - anyInt()); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(409)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_NoDatasetFoundException() - throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new NoDatasetFoundException("Some error")).when(orchestratorService) - .addWorkflowInQueueOfWorkflowExecutions(eq(metisUserView), anyString(), isNull(), isNull(), - anyInt()); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_NoWorkflowFoundException() - throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); 
- doThrow(new NoWorkflowFoundException("Some error")).when(orchestratorService) - .addWorkflowInQueueOfWorkflowExecutions(eq(metisUserView), anyString(), isNull(), isNull(), - anyInt()); - orchestratorControllerMock.perform( - post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_DATASETID_EXECUTE, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void cancelWorkflowExecution() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doNothing().when(orchestratorService).cancelWorkflowExecution(eq(metisUserView), anyString()); - orchestratorControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(204)) - .andExpect(content().string("")); - } - - @Test - void cancelWorkflowExecution_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - orchestratorControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void cancelWorkflowExecution_Unauthorized() throws Exception { - 
MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(orchestratorService).cancelWorkflowExecution(eq(metisUserView), anyString()); - orchestratorControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void cancelWorkflowExecution_NoWorkflowExecutionFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new NoWorkflowExecutionFoundException("Some error")).when(orchestratorService) - .cancelWorkflowExecution(eq(metisUserView), anyString()); - orchestratorControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void getWorkflowExecutionByExecutionId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - 
workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - when(orchestratorService.getWorkflowExecutionByExecutionId(eq(metisUserView), anyString())) - .thenReturn(workflowExecution); - orchestratorControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.workflowStatus", is(WorkflowStatus.RUNNING.name()))); - } - - @Test - void getLatestFinishedPluginWorkflowExecutionByDatasetIdIfPluginTypeAllowedForExecution() - throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - AbstractExecutablePlugin plugin = ExecutablePluginFactory - .createPlugin(new ValidationExternalPluginMetadata()); - plugin.setId("validation_external_id"); - when(orchestratorService.getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution( - metisUserView, Integer.toString(TestObjectFactory.DATASETID), - ExecutablePluginType.VALIDATION_EXTERNAL, - null)) - .thenReturn(plugin); - - orchestratorControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_ALLOWED_PLUGIN, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .param("pluginType", "VALIDATION_EXTERNAL") - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.pluginType", is(PluginType.VALIDATION_EXTERNAL.name()))); - } - - @Test - void getLatestFinishedPluginWorkflowExecutionByDatasetIdIfPluginTypeAllowedForExecution_HarvestingPlugin() - throws Exception { - MetisUserView metisUserView = 
TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - when(orchestratorService - .getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution(metisUserView, - Integer.toString(TestObjectFactory.DATASETID), ExecutablePluginType.OAIPMH_HARVEST, - null)) - .thenReturn(null); - - orchestratorControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_ALLOWED_PLUGIN, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .param("pluginType", "OAIPMH_HARVEST") - .content("")) - .andExpect(status().is(200)); - } - - @Test - void getDatasetExecutionInformation() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - DatasetExecutionInformation datasetExecutionInformation = new DatasetExecutionInformation(); - datasetExecutionInformation.setLastHarvestedDate(new Date(1000)); - datasetExecutionInformation.setLastHarvestedRecords(100); - datasetExecutionInformation.setFirstPublishedDate(new Date(2000)); - datasetExecutionInformation.setLastPublishedDate(new Date(3000)); - datasetExecutionInformation.setLastPublishedRecords(100); - when(orchestratorService - .getDatasetExecutionInformation(metisUserView, Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn(datasetExecutionInformation); - - orchestratorControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_INFORMATION, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - 
.andExpect(status().is(200)) - .andExpect(jsonPath("$.lastHarvestedDate", - is(simpleDateFormat.format(datasetExecutionInformation.getLastHarvestedDate())))) - .andExpect(jsonPath("$.lastHarvestedRecords", - is(datasetExecutionInformation.getLastHarvestedRecords()))) - .andExpect(jsonPath("$.firstPublishedDate", - is(simpleDateFormat.format(datasetExecutionInformation.getFirstPublishedDate())))) - .andExpect(jsonPath("$.lastPublishedDate", - is(simpleDateFormat.format(datasetExecutionInformation.getLastPublishedDate())))) - .andExpect(jsonPath("$.lastPublishedRecords", - is(datasetExecutionInformation.getLastPublishedRecords()))); - } - - @Test - void getAllWorkflowExecutionsByDatasetId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - int listSize = 2; - ResponseListWrapper listOfWorkflowExecutions = new ResponseListWrapper<>(); - listOfWorkflowExecutions.setResultsAndLastPage( - TestObjectFactory.createListOfWorkflowExecutions(listSize + 1), - orchestratorService.getWorkflowExecutionsPerRequest(), 0); - - when(orchestratorService.getWorkflowExecutionsPerRequest()).thenReturn(listSize); - when(orchestratorService.getAllWorkflowExecutions(eq(metisUserView), anyString(), - ArgumentMatchers.anySet(), any(DaoFieldNames.class), anyBoolean(), anyInt())) - .thenReturn(listOfWorkflowExecutions); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowStatus", WorkflowStatus.INQUEUE.name()) - .param("nextPage", "") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.results", hasSize(listSize + 1))) - .andExpect( - 
jsonPath("$.results[0].datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))) - .andExpect(jsonPath("$.results[0].workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.nextPage").isNotEmpty()); - } - - @Test - void getAllWorkflowExecutionsByDatasetIdNegativeNextPage() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowStatus", WorkflowStatus.INQUEUE.name()) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(406)); - } - - @Test - void getAllWorkflowExecutions() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - int listSize = 2; - ResponseListWrapper listOfWorkflowExecutions = new ResponseListWrapper<>(); - listOfWorkflowExecutions.setResultsAndLastPage( - TestObjectFactory.createListOfWorkflowExecutions(listSize + 1), - orchestratorService.getWorkflowExecutionsPerRequest(), 0); - - when(orchestratorService.getWorkflowExecutionsPerRequest()).thenReturn(listSize); - when(orchestratorService.getAllWorkflowExecutions(eq(metisUserView), isNull(), - ArgumentMatchers.anySet(), any(DaoFieldNames.class), anyBoolean(), anyInt())) - .thenReturn(listOfWorkflowExecutions); - 
orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowStatus", WorkflowStatus.INQUEUE.name()) - .param("nextPage", "") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.results", hasSize(listSize + 1))) - .andExpect( - jsonPath("$.results[0].datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))) - .andExpect(jsonPath("$.results[0].workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.nextPage").isNotEmpty()); - } - - @Test - void getAllWorkflowExecutionsNegativeNextPage() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowStatus", WorkflowStatus.INQUEUE.name()) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(406)); - } - - @Test - void getWorkflowExecutionsOverview() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - final int pageSize = 2; - final int nextPage = 5; - final int pageCount = 3; - final ResponseListWrapper listOfWorkflowExecutionAndDatasetViews = - new ResponseListWrapper<>(); - 
listOfWorkflowExecutionAndDatasetViews.setResultsAndLastPage( - TestObjectFactory.createListOfExecutionOverviews(pageSize * pageCount), - orchestratorService.getWorkflowExecutionsPerRequest(), nextPage, pageCount); - - when(orchestratorService.getWorkflowExecutionsPerRequest()).thenReturn(pageSize); - when(orchestratorService - .getWorkflowExecutionsOverview(eq(metisUserView), isNull(), isNull(), isNull(), isNull(), - eq(nextPage), eq(pageCount))) - .thenReturn(listOfWorkflowExecutionAndDatasetViews); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_OVERVIEW) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "" + nextPage) - .param("pageCount", "" + pageCount) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.results", hasSize(pageSize * pageCount))) - .andExpect( - jsonPath("$.results[0].dataset.datasetId", - is(Integer.toString(TestObjectFactory.DATASETID)))) - .andExpect( - jsonPath("$.results[0].execution.workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.results[1].dataset.datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect( - jsonPath("$.results[1].execution.workflowStatus", is(WorkflowStatus.INQUEUE.name()))) - .andExpect(jsonPath("$.nextPage", is(nextPage + pageCount))) - .andExpect(jsonPath("$.listSize", is(pageSize * pageCount))); - } - - @Test - void getWorkflowExecutionsOverviewBadPaginationArguments() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_OVERVIEW) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - 
.contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(406)); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_OVERVIEW) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("pageCount", "0") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(406)); - } - - @Test - void testGetDatasetExecutionHistory() throws Exception { - - // Get the user - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create nonempty history - final Execution execution1 = new Execution(); - execution1.setWorkflowExecutionId("execution 1"); - execution1.setStartedDate(new Date(1)); - final Execution execution2 = new Execution(); - execution2.setWorkflowExecutionId("execution 2"); - execution2.setStartedDate(new Date(2)); - final ExecutionHistory resultNonEmpty = new ExecutionHistory(); - resultNonEmpty.setExecutions(Arrays.asList(execution1, execution2)); - - // Test happy flow with non-empty evolution - when(orchestratorService - .getDatasetExecutionHistory(metisUserView, "" + TestObjectFactory.DATASETID)) - .thenReturn(resultNonEmpty); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_HISTORY, - TestObjectFactory.DATASETID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.executions", hasSize(2))) - .andExpect(jsonPath("$.executions[0].workflowExecutionId", - is(execution1.getWorkflowExecutionId()))) - .andExpect(jsonPath("$.executions[0].startedDate", - is(simpleDateFormat.format(execution1.getStartedDate())))) - .andExpect(jsonPath("$.executions[1].workflowExecutionId", - is(execution2.getWorkflowExecutionId()))) - 
.andExpect(jsonPath("$.executions[1].startedDate", - is(simpleDateFormat.format(execution2.getStartedDate())))); - - // Test happy flow with empty evolution - final ExecutionHistory resultEmpty = new ExecutionHistory(); - resultEmpty.setExecutions(Collections.emptyList()); - when(orchestratorService - .getDatasetExecutionHistory(metisUserView, "" + TestObjectFactory.DATASETID)) - .thenReturn(resultEmpty); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_HISTORY, - TestObjectFactory.DATASETID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.executions", hasSize(0))); - - // Test for bad input - when(orchestratorService - .getDatasetExecutionHistory(metisUserView, "" + TestObjectFactory.DATASETID)) - .thenThrow(new NoDatasetFoundException("")); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_HISTORY, - TestObjectFactory.DATASETID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(404)); - - // Test for unauthorized user - doThrow(new UserUnauthorizedException("")).when(orchestratorService) - .getDatasetExecutionHistory(metisUserView, "" + TestObjectFactory.DATASETID); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_DATASET_DATASETID_HISTORY, - TestObjectFactory.DATASETID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(401)); - } - - @Test - void testGetExecutablePluginsWithDataAvailability() throws Exception { - - // Get the user - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create nonempty history - final PluginWithDataAvailability plugin1 = new 
PluginWithDataAvailability(); - plugin1.setPluginType(PluginType.OAIPMH_HARVEST); - plugin1.setCanDisplayRawXml(true); - final PluginWithDataAvailability plugin2 = new PluginWithDataAvailability(); - plugin2.setPluginType(PluginType.ENRICHMENT); - plugin2.setCanDisplayRawXml(false); - final PluginsWithDataAvailability resultNonEmpty = new PluginsWithDataAvailability(); - resultNonEmpty.setPlugins(Arrays.asList(plugin1, plugin2)); - - // Test happy flow with non-empty evolution - when(orchestratorService - .getExecutablePluginsWithDataAvailability(metisUserView, TestObjectFactory.EXECUTIONID)) - .thenReturn(resultNonEmpty); - orchestratorControllerMock - .perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID_PLUGINS_DATA_AVAILABILITY, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.plugins", hasSize(2))) - .andExpect(jsonPath("$.plugins[0].pluginType", - is(plugin1.getPluginType().name()))) - .andExpect(jsonPath("$.plugins[0].canDisplayRawXml", - is(plugin1.isCanDisplayRawXml()))) - .andExpect(jsonPath("$.plugins[1].pluginType", - is(plugin2.getPluginType().name()))) - .andExpect(jsonPath("$.plugins[1].canDisplayRawXml", - is(plugin2.isCanDisplayRawXml()))); - - // Test happy flow with empty evolution - final PluginsWithDataAvailability resultEmpty = new PluginsWithDataAvailability(); - resultEmpty.setPlugins(Collections.emptyList()); - when(orchestratorService - .getExecutablePluginsWithDataAvailability(metisUserView, TestObjectFactory.EXECUTIONID)) - .thenReturn(resultEmpty); - orchestratorControllerMock - .perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID_PLUGINS_DATA_AVAILABILITY, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.plugins", hasSize(0))); - - // Test for bad input - when(orchestratorService 
- .getExecutablePluginsWithDataAvailability(metisUserView, TestObjectFactory.EXECUTIONID)) - .thenThrow(new NoWorkflowExecutionFoundException("")); - orchestratorControllerMock - .perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID_PLUGINS_DATA_AVAILABILITY, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(404)); - - // Test for unauthorized user - doThrow(new UserUnauthorizedException("")).when(orchestratorService) - .getExecutablePluginsWithDataAvailability(metisUserView, TestObjectFactory.EXECUTIONID); - orchestratorControllerMock - .perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EXECUTIONS_EXECUTIONID_PLUGINS_DATA_AVAILABILITY, - TestObjectFactory.EXECUTIONID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(401)); - } - - @Test - void testGetRecordEvolutionForVersion() throws Exception { - - // Get the user - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create nonempty evolution step - final VersionEvolutionStep step1 = new VersionEvolutionStep(); - step1.setFinishedTime(new Date(1)); - step1.setPluginType(ExecutablePluginType.OAIPMH_HARVEST); - step1.setWorkflowExecutionId("execution 1"); - final VersionEvolutionStep step2 = new VersionEvolutionStep(); - step2.setFinishedTime(new Date(2)); - step2.setPluginType(ExecutablePluginType.TRANSFORMATION); - step2.setWorkflowExecutionId("execution 2"); - final VersionEvolution resultNonEmpty = new VersionEvolution(); - resultNonEmpty.setEvolutionSteps(Arrays.asList(step1, step2)); - - // Test happy flow with non-empty evolution - final PluginType pluginType = PluginType.MEDIA_PROCESS; - when(orchestratorService - .getRecordEvolutionForVersion(metisUserView, TestObjectFactory.EXECUTIONID, 
pluginType)) - .thenReturn(resultNonEmpty); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EVOLUTION, TestObjectFactory.EXECUTIONID, - pluginType) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.evolutionSteps", hasSize(2))) - .andExpect( - jsonPath("$.evolutionSteps[0].workflowExecutionId", is(step1.getWorkflowExecutionId()))) - .andExpect(jsonPath("$.evolutionSteps[0].pluginType", is(step1.getPluginType().name()))) - .andExpect(jsonPath("$.evolutionSteps[0].finishedTime", - is((int) step1.getFinishedTime().getTime()))) - .andExpect( - jsonPath("$.evolutionSteps[1].workflowExecutionId", is(step2.getWorkflowExecutionId()))) - .andExpect(jsonPath("$.evolutionSteps[1].pluginType", is(step2.getPluginType().name()))) - .andExpect(jsonPath("$.evolutionSteps[1].finishedTime", - is((int) step2.getFinishedTime().getTime()))); - - // Test happy flow with empty evolution - final VersionEvolution resultEmpty = new VersionEvolution(); - resultEmpty.setEvolutionSteps(Collections.emptyList()); - when(orchestratorService - .getRecordEvolutionForVersion(metisUserView, TestObjectFactory.EXECUTIONID, pluginType)) - .thenReturn(resultEmpty); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EVOLUTION, TestObjectFactory.EXECUTIONID, - pluginType) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.evolutionSteps", hasSize(0))); - - // Test for bad input - when(orchestratorService - .getRecordEvolutionForVersion(metisUserView, TestObjectFactory.EXECUTIONID, pluginType)) - .thenThrow(new NoWorkflowExecutionFoundException("")); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EVOLUTION, TestObjectFactory.EXECUTIONID, - pluginType) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(404)); - - // Test for 
unauthorized user - doThrow(new UserUnauthorizedException("")).when(orchestratorService) - .getRecordEvolutionForVersion(metisUserView, TestObjectFactory.EXECUTIONID, pluginType); - orchestratorControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_EVOLUTION, TestObjectFactory.EXECUTIONID, - pluginType) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER)) - .andExpect(status().is(401)); - } -} diff --git a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestProxiesController.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestProxiesController.java deleted file mode 100644 index ee60bf01ba..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestProxiesController.java +++ /dev/null @@ -1,362 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static com.jayway.jsonassert.impl.matcher.IsCollectionWithSize.hasSize; -import static org.hamcrest.core.Is.is; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import 
eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.rest.ListOfIds; -import eu.europeana.metis.core.rest.PaginatedRecordsResponse; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.RecordsResponse; -import eu.europeana.metis.core.rest.exception.RestResponseExceptionHandler; -import eu.europeana.metis.core.rest.stats.AttributeStatistics; -import eu.europeana.metis.core.rest.stats.NodePathStatistics; -import eu.europeana.metis.core.rest.stats.NodeValueStatistics; -import eu.europeana.metis.core.rest.stats.RecordStatistics; -import eu.europeana.metis.core.rest.utils.TestObjectFactory; -import eu.europeana.metis.core.service.ProxiesService; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.RestEndpoints; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.stream.Stream; -import org.hamcrest.core.IsNull; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-26 - */ -class TestProxiesController { - - private static ProxiesService proxiesService; - private static AuthenticationClient authenticationClient; - private static MockMvc proxiesControllerMock; - - @BeforeAll - static void setUp() { - proxiesService = mock(ProxiesService.class); - authenticationClient = mock(AuthenticationClient.class); - ProxiesController proxiesController = new 
ProxiesController(proxiesService, - authenticationClient); - proxiesControllerMock = MockMvcBuilders - .standaloneSetup(proxiesController) - .setControllerAdvice(new RestResponseExceptionHandler()) - .build(); - } - - @Test - void getExternalTaskLogs() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - int from = 1; - int to = 100; - List listOfSubTaskInfo = TestObjectFactory.createListOfSubTaskInfo(); - for (SubTaskInfo subTaskInfo : listOfSubTaskInfo) { - subTaskInfo.setAdditionalInformations(null); - } - when(proxiesService.getExternalTaskLogs(metisUserView, TestObjectFactory.TOPOLOGY_NAME, - TestObjectFactory.EXTERNAL_TASK_ID, from, to)).thenReturn(listOfSubTaskInfo); - - proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_LOGS, - TestObjectFactory.TOPOLOGY_NAME, TestObjectFactory.EXTERNAL_TASK_ID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("from", Integer.toString(from)) - .param("to", Integer.toString(to)) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$[0].additionalInformations", is(IsNull.nullValue()))) - .andExpect(jsonPath("$[1].additionalInformations", is(IsNull.nullValue()))); - } - - @Test - void existsExternalTaskReport() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - when(proxiesService.existsExternalTaskReport(metisUserView, TestObjectFactory.TOPOLOGY_NAME, - TestObjectFactory.EXTERNAL_TASK_ID)).thenReturn(true); - - proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_REPORT_EXISTS, - 
TestObjectFactory.TOPOLOGY_NAME, TestObjectFactory.EXTERNAL_TASK_ID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.existsExternalTaskReport", is(true))); - } - - @Test - void getExternalTaskReport() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - List listOfSubTaskInfo = TestObjectFactory.createListOfSubTaskInfo(); - for (SubTaskInfo subTaskInfo : listOfSubTaskInfo) { - subTaskInfo.setAdditionalInformations(null); - } - - TaskErrorsInfo taskErrorsInfo = TestObjectFactory.createTaskErrorsInfoListWithIdentifiers(2); - when(proxiesService.getExternalTaskReport(metisUserView, TestObjectFactory.TOPOLOGY_NAME, - TestObjectFactory.EXTERNAL_TASK_ID, 10)).thenReturn(taskErrorsInfo); - - proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_REPORT, - TestObjectFactory.TOPOLOGY_NAME, TestObjectFactory.EXTERNAL_TASK_ID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("idsPerError", "10") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.id", is(TestObjectFactory.EXTERNAL_TASK_ID))) - .andExpect(jsonPath("$.errors", hasSize(taskErrorsInfo.getErrors().size()))) - .andExpect(jsonPath("$.errors[0].errorDetails", - hasSize(taskErrorsInfo.getErrors().get(0).getErrorDetails().size()))) - .andExpect(jsonPath("$.errors[1].errorDetails", - hasSize(taskErrorsInfo.getErrors().get(1).getErrorDetails().size()))); - } - - @Test - void getExternalTaskStatistics() throws Exception { - - // Create user and set authentication. 
- final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create response object. - final NodeValueStatistics nodeValue = new NodeValueStatistics(); - nodeValue.setOccurrences(3); - nodeValue.setValue("node value"); - nodeValue.setAttributeStatistics(Collections.emptyList()); - final NodePathStatistics nodePath = new NodePathStatistics(); - nodePath.setxPath("node path"); - nodePath.setNodeValueStatistics(Collections.singletonList(nodeValue)); - final RecordStatistics record = new RecordStatistics(); - record.setTaskId(TestObjectFactory.EXTERNAL_TASK_ID); - record.setNodePathStatistics(Collections.singletonList(nodePath)); - - // Make the call and verify the result. - when(proxiesService.getExternalTaskStatistics(metisUserView, TestObjectFactory.TOPOLOGY_NAME, - TestObjectFactory.EXTERNAL_TASK_ID)).thenReturn(record); - proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_STATISTICS, - TestObjectFactory.TOPOLOGY_NAME, TestObjectFactory.EXTERNAL_TASK_ID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON).content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.taskId", is(TestObjectFactory.EXTERNAL_TASK_ID))) - .andExpect(jsonPath("$.nodePathStatistics", hasSize(record.getNodePathStatistics().size()))) - .andExpect(jsonPath("$.nodePathStatistics[0].xPath", is(nodePath.getxPath()))) - .andExpect(jsonPath("$.nodePathStatistics[0].nodeValueStatistics", hasSize(nodePath.getNodeValueStatistics().size()))) - .andExpect(jsonPath("$.nodePathStatistics[0].nodeValueStatistics[0].value", is(nodeValue.getValue()))) - .andExpect(jsonPath("$.nodePathStatistics[0].nodeValueStatistics[0].occurrences", is((int) nodeValue.getOccurrences()))) - 
.andExpect(jsonPath("$.nodePathStatistics[0].nodeValueStatistics[0].attributeStatistics", hasSize(nodeValue.getAttributeStatistics().size()))); - } - - @Test - void getExternalTaskNodeStatistics() throws Exception { - - // Create user and set authentication. - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create response object. - final AttributeStatistics attribute1 = new AttributeStatistics(); - attribute1.setxPath("attribute path 1"); - attribute1.setValue("attribute value 1"); - attribute1.setOccurrences(1); - final AttributeStatistics attribute2 = new AttributeStatistics(); - attribute2.setxPath("attribute path 2"); - attribute2.setValue("attribute value 2"); - attribute2.setOccurrences(2); - final NodeValueStatistics nodeValue = new NodeValueStatistics(); - nodeValue.setOccurrences(3); - nodeValue.setValue("node value"); - nodeValue.setAttributeStatistics(Arrays.asList(attribute1, attribute2)); - final NodePathStatistics nodePath = new NodePathStatistics(); - nodePath.setxPath("node path"); - nodePath.setNodeValueStatistics(Collections.singletonList(nodeValue)); - - // Mock the proxiesService instance. - when(proxiesService.getAdditionalNodeStatistics(metisUserView, TestObjectFactory.TOPOLOGY_NAME, - TestObjectFactory.EXTERNAL_TASK_ID, nodePath.getxPath())).thenReturn(nodePath); - - // Make the call and verify the result. 
- proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_TOPOLOGY_TASK_NODE_STATISTICS, - TestObjectFactory.TOPOLOGY_NAME, TestObjectFactory.EXTERNAL_TASK_ID) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nodePath", nodePath.getxPath())) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.xPath", is(nodePath.getxPath()))) - .andExpect(jsonPath("$.nodeValueStatistics", hasSize(nodePath.getNodeValueStatistics().size()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].value", is(nodeValue.getValue()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].occurrences", is((int) nodeValue.getOccurrences()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics", hasSize(nodeValue.getAttributeStatistics().size()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[0].xPath", is(attribute1.getxPath()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[0].value", is(attribute1.getValue()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[0].occurrences", is((int) attribute1.getOccurrences()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[1].xPath", is(attribute2.getxPath()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[1].value", is(attribute2.getValue()))) - .andExpect(jsonPath("$.nodeValueStatistics[0].attributeStatistics[1].occurrences", is((int) attribute2.getOccurrences()))); - } - - @Test - void getListOfFileContentsFromPluginExecution() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - ArrayList records = new ArrayList<>(); - Record record1 = new Record("ECLOUDID1", - ""); - Record record2 = new Record("ECLOUDID2", - ""); - records.add(record1); - records.add(record2); - PaginatedRecordsResponse 
recordsResponse = new PaginatedRecordsResponse(records, null); - - when(proxiesService.getListOfFileContentsFromPluginExecution(metisUserView, - TestObjectFactory.EXECUTIONID, ExecutablePluginType.TRANSFORMATION, null, 5)) - .thenReturn(recordsResponse); - - proxiesControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", PluginType.TRANSFORMATION.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.records[0].ecloudId", is(record1.getEcloudId()))) - .andExpect(jsonPath("$.records[0].xmlRecord", is(record1.getXmlRecord()))) - .andExpect(jsonPath("$.records[1].ecloudId", is(record2.getEcloudId()))) - .andExpect(jsonPath("$.records[1].xmlRecord", is(record2.getXmlRecord()))); - } - // TODO: add tests for lookupIdFromUISClient - - @Test - void testGetRecordEvolutionForVersion() throws Exception { - - // Get the user - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - - // Create nonempty ID list and result list. 
- final Record record1 = new Record("ID 1", "content 1"); - final Record record2 = new Record("ID 2", "content 2"); - final RecordsResponse output = new RecordsResponse(Arrays.asList(record1, record2)); - final List expectedInput = Stream.concat(Stream.of("UNKNOWN ID"), - output.getRecords().stream().map(Record::getEcloudId)).toList(); - - // Test happy flow with non-empty ID list - final ExecutablePluginType pluginType = ExecutablePluginType.MEDIA_PROCESS; - doAnswer(invocation -> { - final ListOfIds input = invocation.getArgument(3); - assertEquals(expectedInput, input.getIds()); - return output; - }).when(proxiesService).getListOfFileContentsFromPluginExecution(same(metisUserView), - eq(TestObjectFactory.EXECUTIONID), eq(pluginType), any()); - proxiesControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", pluginType.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("{\"ids\":[\"" + String.join("\",\"", expectedInput) + "\"]}")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.records", hasSize(2))) - .andExpect(jsonPath("$.records[0].ecloudId", is(record1.getEcloudId()))) - .andExpect(jsonPath("$.records[0].xmlRecord", is(record1.getXmlRecord()))) - .andExpect(jsonPath("$.records[1].ecloudId", is(record2.getEcloudId()))) - .andExpect(jsonPath("$.records[1].xmlRecord", is(record2.getXmlRecord()))); - - // Test happy flow with empty ID list - final RecordsResponse emptyOutput = new RecordsResponse(Collections.emptyList()); - doAnswer(invocation -> { - final ListOfIds input = invocation.getArgument(3); - assertTrue(input.getIds().isEmpty()); - return emptyOutput; - }).when(proxiesService).getListOfFileContentsFromPluginExecution(same(metisUserView), - eq(TestObjectFactory.EXECUTIONID), eq(pluginType), any()); - proxiesControllerMock - 
.perform(post(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", pluginType.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("{\"ids\":[]}")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.records", hasSize(0))); - proxiesControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", pluginType.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("{}")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.records", hasSize(0))); - - // Test for bad input - when(proxiesService.getListOfFileContentsFromPluginExecution(same(metisUserView), - eq(TestObjectFactory.EXECUTIONID), eq(pluginType), any())) - .thenThrow(new NoWorkflowExecutionFoundException("")); - proxiesControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", pluginType.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("{}")) - .andExpect(status().is(404)); - - // Test for unauthorized user - doThrow(new UserUnauthorizedException("")).when(proxiesService) - .getListOfFileContentsFromPluginExecution(same(metisUserView), - eq(TestObjectFactory.EXECUTIONID), eq(pluginType), any()); - proxiesControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_PROXIES_RECORDS_BY_IDS) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("workflowExecutionId", TestObjectFactory.EXECUTIONID) - .param("pluginType", pluginType.name()) - .contentType(MediaType.APPLICATION_JSON) - .content("{}")) - .andExpect(status().is(401)); - } -} diff --git 
a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestScheduleWorkflowController.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestScheduleWorkflowController.java deleted file mode 100644 index 367cd7585b..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/controller/TestScheduleWorkflowController.java +++ /dev/null @@ -1,412 +0,0 @@ -package eu.europeana.metis.core.rest.controller; - -import static com.jayway.jsonassert.impl.matcher.IsCollectionWithSize.hasSize; -import static org.hamcrest.core.Is.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -import eu.europeana.metis.authentication.rest.client.AuthenticationClient; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoScheduledWorkflowFoundException; -import 
eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.ScheduledWorkflowAlreadyExistsException; -import eu.europeana.metis.core.rest.exception.RestResponseExceptionHandler; -import eu.europeana.metis.core.rest.utils.TestObjectFactory; -import eu.europeana.metis.core.rest.utils.TestUtils; -import eu.europeana.metis.core.service.ScheduleWorkflowService; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.nio.charset.StandardCharsets; -import java.util.List; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.springframework.http.MediaType; -import org.springframework.http.converter.StringHttpMessageConverter; -import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-05 - */ -class TestScheduleWorkflowController { - - private static ScheduleWorkflowService scheduleWorkflowService; - private static MockMvc scheduleWorkflowControllerMock; - private static AuthenticationClient authenticationClient; - - @BeforeAll - static void setUp() { - scheduleWorkflowService = mock(ScheduleWorkflowService.class); - authenticationClient = mock(AuthenticationClient.class); - ScheduleWorkflowController scheduleWorkflowController = new ScheduleWorkflowController( - scheduleWorkflowService, authenticationClient); - scheduleWorkflowControllerMock = 
MockMvcBuilders - .standaloneSetup(scheduleWorkflowController) - .setControllerAdvice(new RestResponseExceptionHandler()) - .setMessageConverters(new MappingJackson2HttpMessageConverter(), - new MappingJackson2XmlHttpMessageConverter(), - new StringHttpMessageConverter(StandardCharsets.UTF_8)) - .build(); - } - - @AfterEach - void cleanUp() { - reset(scheduleWorkflowService); - reset(authenticationClient); - } - - @Test - void scheduleWorkflowExecution() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - scheduleWorkflowControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(201)) - .andExpect(content().string("")); - verify(authenticationClient, times(1)) - .getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER); - verify(scheduleWorkflowService, times(1)) - .scheduleWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - } - - @Test - void scheduleWorkflowExecution_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - scheduleWorkflowControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - 
.andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void scheduleWorkflowExecution_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(scheduleWorkflowService).scheduleWorkflow(any(), any()); - scheduleWorkflowControllerMock - .perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void scheduleWorkflowExecution_BadContentException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new BadContentException("Some error")).when(scheduleWorkflowService) - .scheduleWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(406)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void 
scheduleWorkflowExecution_ScheduledWorkflowAlreadyExistsException() - throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new ScheduledWorkflowAlreadyExistsException("Some error")).when(scheduleWorkflowService) - .scheduleWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(409)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void scheduleWorkflowExecution_NoWorkflowFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new NoWorkflowFoundException("Some error")).when(scheduleWorkflowService) - .scheduleWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void scheduleWorkflowExecution_NoDatasetFoundException() throws Exception { - MetisUserView 
metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new NoDatasetFoundException("Some error")).when(scheduleWorkflowService) - .scheduleWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(post(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void getScheduledWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - when(scheduleWorkflowService.getScheduledWorkflowByDatasetId(any(MetisUserView.class), anyString())) - .thenReturn(scheduledWorkflow); - scheduleWorkflowControllerMock.perform( - get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.scheduleFrequence", is(ScheduleFrequence.ONCE.name()))); - verify(authenticationClient, times(1)) - .getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER); - verify(scheduleWorkflowService, times(1)) - .getScheduledWorkflowByDatasetId(any(MetisUserView.class), anyString()); - } - - @Test - void 
getAllScheduledWorkflows() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - int listSize = 2; - List listOfScheduledWorkflows = TestObjectFactory - .createListOfScheduledWorkflows(listSize + 1);//To get the effect of next page - - when(scheduleWorkflowService.getScheduledWorkflowsPerRequest()).thenReturn(listSize); - when(scheduleWorkflowService - .getAllScheduledWorkflows(any(MetisUserView.class), any(ScheduleFrequence.class), anyInt())) - .thenReturn(listOfScheduledWorkflows); - scheduleWorkflowControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(200)) - .andExpect(jsonPath("$.results", hasSize(listSize + 1))) - .andExpect( - jsonPath("$.results[0].datasetId", is(Integer.toString(TestObjectFactory.DATASETID)))) - .andExpect(jsonPath("$.results[0].scheduleFrequence", is(ScheduleFrequence.ONCE.name()))) - .andExpect(jsonPath("$.results[1].datasetId", - is(Integer.toString(TestObjectFactory.DATASETID + 1)))) - .andExpect(jsonPath("$.results[1].scheduleFrequence", is(ScheduleFrequence.ONCE.name()))) - .andExpect(jsonPath("$.nextPage").isNotEmpty()); - verify(authenticationClient, times(1)) - .getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER); - } - - @Test - void getAllScheduledWorkflowsNegativeNextPage() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - scheduleWorkflowControllerMock - .perform(get(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", 
TestObjectFactory.AUTHORIZATION_HEADER) - .param("nextPage", "-1") - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(406)); - } - - @Test - void updateScheduledWorkflow() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(204)) - .andExpect(content().string("")); - verify(authenticationClient, times(1)) - .getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER); - verify(scheduleWorkflowService, times(1)) - .updateScheduledWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - } - - @Test - void updateScheduledWorkflow_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void updateScheduledWorkflow_Unauthorized() throws Exception { - MetisUserView metisUserView = 
TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory.createScheduledWorkflowObject(); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(scheduleWorkflowService).updateScheduledWorkflow(any(), any()); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void updateScheduledWorkflow_NoWorkflowFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - doThrow(new NoWorkflowFoundException("Some error")).when(scheduleWorkflowService) - .updateScheduledWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void updateScheduledWorkflow_NoScheduledWorkflowFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - 
when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - doThrow(new NoScheduledWorkflowFoundException("Some error")).when(scheduleWorkflowService) - .updateScheduledWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(404)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void updateScheduledWorkflow_BadContentException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - doThrow(new BadContentException("Some error")).when(scheduleWorkflowService) - .updateScheduledWorkflow(any(MetisUserView.class), any(ScheduledWorkflow.class)); - scheduleWorkflowControllerMock.perform(put(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content(TestUtils.convertObjectToJsonBytes(scheduledWorkflow))) - .andExpect(status().is(406)) - .andExpect(content().string("{\"errorMessage\":\"Some error\"}")); - } - - @Test - void deleteScheduledWorkflowExecution() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - 
scheduleWorkflowControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(204)) - .andExpect(content().string("")); - verify(authenticationClient, times(1)) - .getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER); - verify(scheduleWorkflowService, times(1)) - .deleteScheduledWorkflow(any(MetisUserView.class), anyString()); - } - - - @Test - void deleteScheduledWorkflowExecution_Unauthenticated() throws Exception { - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)); - scheduleWorkflowControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - .andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } - - @Test - void deleteScheduledWorkflowExecution_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(authenticationClient.getUserByAccessTokenInHeader(TestObjectFactory.AUTHORIZATION_HEADER)) - .thenReturn(metisUserView); - doThrow(new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED)) - .when(scheduleWorkflowService).deleteScheduledWorkflow(any(), any()); - scheduleWorkflowControllerMock.perform( - delete(RestEndpoints.ORCHESTRATOR_WORKFLOWS_SCHEDULE_DATASETID, - Integer.toString(TestObjectFactory.DATASETID)) - .header("Authorization", TestObjectFactory.AUTHORIZATION_HEADER) - .contentType(MediaType.APPLICATION_JSON) - .content("")) - 
.andExpect(status().is(401)) - .andExpect(jsonPath("$.errorMessage", is(CommonStringValues.UNAUTHORIZED))); - } -} diff --git a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestObjectFactory.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestObjectFactory.java deleted file mode 100644 index 15aafba425..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestObjectFactory.java +++ /dev/null @@ -1,308 +0,0 @@ -package eu.europeana.metis.core.rest.utils; - -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; - -import eu.europeana.cloud.common.model.dps.ErrorDetails; -import eu.europeana.cloud.common.model.dps.RecordState; -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.Dataset.PublicationFitness; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.execution.details.WorkflowExecutionView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionAndDatasetView; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import 
eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.UUID; -import org.bson.types.ObjectId; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-03-15 - */ -public class TestObjectFactory { - - public static final int DATASETID = 100; - public static final String XSLTID = "5a9821af34f04b794dcf63df"; - public static final String EXECUTIONID = "5a5dc67ba458bb00083d49e3"; - public static final String DATASETNAME = "datasetName"; - public static final String EMAIL = "user.metis@europeana.eu"; - public static final String AUTHORIZATION_HEADER = "Bearer 1234567890qwertyuiopasdfghjklQWE"; - public static final String TOPOLOGY_NAME = "topology_name"; - public static final long EXTERNAL_TASK_ID = 2_070_373_127_078_497_810L; - private static final int OCCURRENCES = 2; - - - private TestObjectFactory() { - } - - /** - * Create dummy workflow - * - * @return the created workflow - */ - public static Workflow createWorkflowObject() { - Workflow workflow = new Workflow(); - workflow.setDatasetId(Integer.toString(DATASETID)); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPluginMetadata.setUrl("http://example.com"); - oaipmhHarvestPluginMetadata.setEnabled(true); - 
ValidationExternalPluginMetadata validationExternalPluginMetadata = new ValidationExternalPluginMetadata(); - validationExternalPluginMetadata.setEnabled(true); - TransformationPluginMetadata transformationPluginMetadata = new TransformationPluginMetadata(); - transformationPluginMetadata.setEnabled(true); - ValidationInternalPluginMetadata validationInternalPluginMetadata = new ValidationInternalPluginMetadata(); - validationInternalPluginMetadata.setEnabled(true); - NormalizationPluginMetadata normalizationPluginMetadata = new NormalizationPluginMetadata(); - normalizationPluginMetadata.setEnabled(true); - LinkCheckingPluginMetadata linkCheckingPluginMetadata = new LinkCheckingPluginMetadata(); - linkCheckingPluginMetadata.setEnabled(true); - EnrichmentPluginMetadata enrichmentPluginMetadata = new EnrichmentPluginMetadata(); - enrichmentPluginMetadata.setEnabled(true); - - List abstractMetisPluginMetadata = new ArrayList<>(); - abstractMetisPluginMetadata.add(oaipmhHarvestPluginMetadata); - abstractMetisPluginMetadata.add(validationExternalPluginMetadata); - abstractMetisPluginMetadata.add(transformationPluginMetadata); - abstractMetisPluginMetadata.add(validationInternalPluginMetadata); - abstractMetisPluginMetadata.add(normalizationPluginMetadata); - abstractMetisPluginMetadata.add(linkCheckingPluginMetadata); - abstractMetisPluginMetadata.add(enrichmentPluginMetadata); - workflow.setMetisPluginsMetadata(abstractMetisPluginMetadata); - - return workflow; - } - - /** - * Create dummy workflow execution - * - * @return the created workflow execution - */ - public static WorkflowExecution createWorkflowExecutionObject() { - Dataset dataset = createDataset(DATASETNAME); - ArrayList abstractMetisPlugins = new ArrayList<>(); - AbstractMetisPlugin oaipmhHarvestPlugin = ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - AbstractMetisPlugin validationExternalPlugin = ExecutablePluginFactory - 
.createPlugin(new ValidationExternalPluginMetadata()); - abstractMetisPlugins.add(validationExternalPlugin); - - WorkflowExecution workflowExecution = new WorkflowExecution(dataset, abstractMetisPlugins, 0); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setCreatedDate(new Date()); - - return workflowExecution; - } - - private static WorkflowExecution createWorkflowExecutionObject(Dataset dataset) { - WorkflowExecution workflowExecution = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setCreatedDate(new Date()); - - return workflowExecution; - } - - /** - * Create a list of dummy workflow executions. The dataset name will have a suffix number for each - * dataset. - * - * @param size the number of dummy workflow executions to create - * @return the created list - */ - public static List createListOfWorkflowExecutions(int size) { - return createExecutionsWithDatasets(size).stream().map(ExecutionDatasetPair::getExecution) - .map(execution -> new WorkflowExecutionView(execution, false, plugin -> true)) - .toList(); - } - - /** - * Create a list of dummy execution overviews. The dataset name will have a suffix number for each - * dataset. 
- * - * @param size the number of dummy execution overviews to create - * @return the created list - */ - public static List createListOfExecutionOverviews(int size) { - return createExecutionsWithDatasets(size).stream() - .map(pair -> new ExecutionAndDatasetView(pair.getExecution(), pair.getDataset())) - .toList(); - } - - private static List createExecutionsWithDatasets(int size) { - final List result = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - Dataset dataset = createDataset(String.format("%s%s", DATASETNAME, i)); - dataset.setId(new ObjectId(new Date(i))); - dataset.setDatasetId(Integer.toString(DATASETID + i)); - WorkflowExecution workflowExecution = createWorkflowExecutionObject(dataset); - workflowExecution.setId(new ObjectId()); - result.add(new ExecutionDatasetPair(dataset, workflowExecution)); - } - return result; - } - - /** - * Create a dummy scheduled workflow - * - * @return the created scheduled workflow - */ - public static ScheduledWorkflow createScheduledWorkflowObject() { - ScheduledWorkflow scheduledWorkflow = new ScheduledWorkflow(); - scheduledWorkflow.setDatasetId(Integer.toString(DATASETID)); - scheduledWorkflow.setPointerDate(new Date()); - scheduledWorkflow.setScheduleFrequence(ScheduleFrequence.ONCE); - scheduledWorkflow.setWorkflowPriority(0); - return scheduledWorkflow; - } - - /** - * Create a list of dummy scheduled workflows. The dataset name will have a suffix number for each - * dataset. 
- * - * @param size the number of dummy scheduled workflows to create - * @return the created list - */ - public static List createListOfScheduledWorkflows(int size) { - List scheduledWorkflows = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ScheduledWorkflow scheduledWorkflow = createScheduledWorkflowObject(); - scheduledWorkflow.setId(new ObjectId()); - scheduledWorkflow.setDatasetId(Integer.toString(DATASETID + i)); - scheduledWorkflows.add(scheduledWorkflow); - } - return scheduledWorkflows; - } - - /** - * Create a dummy dataset - * - * @param datasetName the dataset name to be used - * @return the created dataset - */ - public static Dataset createDataset(String datasetName) { - Dataset ds = new Dataset(); - ds.setEcloudDatasetId("NOT_CREATED_YET-f525f64c-fea0-44bf-8c56-88f30962734c"); - ds.setDatasetId(Integer.toString(DATASETID)); - ds.setDatasetName(datasetName); - final String organizationId = "1234567890"; - ds.setOrganizationId(organizationId); - ds.setOrganizationName("OrganizationName"); - ds.setProvider(organizationId); - ds.setIntermediateProvider(organizationId); - ds.setDataProvider(organizationId); - ds.setCreatedByUserId("userId"); - ds.setCreatedDate(new Date()); - ds.setUpdatedDate(new Date()); - ds.setReplacedBy("replacedBy"); - ds.setReplaces("12345"); - ds.setCountry(Country.GREECE); - ds.setLanguage(Language.AR); - ds.setDescription("description"); - ds.setPublicationFitness(PublicationFitness.PARTIALLY_FIT); - ds.setNotes("Notes"); - return ds; - } - - /** - * Create a dummy metis user - * - * @param email the email for the dummy user - * @return the created metis user - */ - public static MetisUserView createMetisUser(String email) { - MetisUserView metisUserView = spy(new MetisUserView()); - doReturn(email).when(metisUserView).getEmail(); - doReturn(AccountRole.EUROPEANA_DATA_OFFICER).when(metisUserView).getAccountRole(); - doReturn("Organization_12345").when(metisUserView).getOrganizationId(); - 
doReturn("OrganizationName").when(metisUserView).getOrganizationName(); - doReturn(true).when(metisUserView).isMetisUserFlag(); - doReturn("FirstName").when(metisUserView).getFirstName(); - doReturn("LastName").when(metisUserView).getLastName(); - doReturn("User_12345").when(metisUserView).getUserId(); - return metisUserView; - } - - /** - * Create a dummy sub task info - * - * @return the created sub task info - */ - public static List createListOfSubTaskInfo() { - - SubTaskInfo subTaskInfo1 = new SubTaskInfo(1, "some_resource_id1", RecordState.SUCCESS, "info", - "additional info", "europeanaId", 0L); - SubTaskInfo subTaskInfo2 = new SubTaskInfo(2, "some_resource_id2", RecordState.SUCCESS, "info", - "additional info", "europeanaId", 0L); - ArrayList subTaskInfos = new ArrayList<>(); - subTaskInfos.add(subTaskInfo1); - subTaskInfos.add(subTaskInfo2); - return subTaskInfos; - } - - /** - * Create a task errors info object, which contains a list of {@link TaskErrorInfo} objects. These - * will also contain a list of {@link ErrorDetails} that in turn contain dummy identifiers. 
- * - * @param numberOfErrorTypes the number of dummy error types - * @return the created task errors info - */ - public static TaskErrorsInfo createTaskErrorsInfoListWithIdentifiers(int numberOfErrorTypes) { - ArrayList taskErrorInfos = new ArrayList<>(); - for (int i = 0; i < numberOfErrorTypes; i++) { - TaskErrorInfo taskErrorInfo = new TaskErrorInfo("be39ef50-f77d-11e7-af0f-fa163e77119a", - String.format("Error%s", i), OCCURRENCES); - ArrayList errorDetails = new ArrayList<>(); - errorDetails.add(new ErrorDetails("identifier1", "error1")); - errorDetails.add(new ErrorDetails("identifier2", "error2")); - taskErrorInfo.setErrorDetails(errorDetails); - taskErrorInfos.add(taskErrorInfo); - } - return new TaskErrorsInfo(EXTERNAL_TASK_ID, taskErrorInfos); - } - - /** - * Create a dummy list of {@link Record}s - * - * @param numberOfRecords the number of records to create - * @return the created list of records - */ - public static List createListOfRecords(int numberOfRecords) { - List records = new ArrayList<>(numberOfRecords); - for (int i = 0; i < numberOfRecords; i++) { - String domain = String.format("http://some.domain.com/id/path/%s", i); - records.add(new Record(UUID.randomUUID().toString(), - "\n" - + "\n" - + "\t\n" - + "\t\n" - + "\n")); - } - return records; - } - -} diff --git a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestUtils.java b/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestUtils.java deleted file mode 100644 index 7ae83619bf..0000000000 --- a/metis-core/metis-core-rest/src/test/java/eu/europeana/metis/core/rest/utils/TestUtils.java +++ /dev/null @@ -1,27 +0,0 @@ -package eu.europeana.metis.core.rest.utils; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.io.IOException; - -/** - * Utility class with helpful methods for tests - */ -public final class TestUtils { - - private TestUtils() { - } - - /** - * 
Convert a java {@link Object} to a byte array. - * - * @param object the object to convert - * @return the byte array - * @throws IOException if an exception occurred during the conversion - */ - public static byte[] convertObjectToJsonBytes(Object object) throws IOException { - ObjectMapper mapper = new ObjectMapper(); - mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - return mapper.writeValueAsBytes(object); - } -} diff --git a/metis-core/metis-core-service/.gitignore b/metis-core/metis-core-service/.gitignore deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/metis-core/metis-core-service/pom.xml b/metis-core/metis-core-service/pom.xml deleted file mode 100644 index 5375320078..0000000000 --- a/metis-core/metis-core-service/pom.xml +++ /dev/null @@ -1,154 +0,0 @@ - - - 4.0.0 - - metis-core - eu.europeana.metis - 12.2 - - metis-core-service - - - - jakarta.xml.bind - jakarta.xml.bind-api - test - - - eu.europeana.metis - metis-authentication-common - ${project.version} - - - eu.europeana.metis - metis-core-common - ${project.version} - - - metis-transformation-service - eu.europeana.metis - ${project.version} - - - eu.europeana.cloud - ecloud-service-uis-rest-client-java - ${version.ecloud} - - - org.slf4j - slf4j-log4j12 - - - - - eu.europeana.cloud - ecloud-service-mcs-rest-client-java - ${version.ecloud} - - - org.slf4j - slf4j-log4j12 - - - - - org.apache.commons - commons-lang3 - - - org.springframework - spring-context - - - org.mockito - mockito-core - - - org.junit.jupiter - junit-jupiter-api - - - org.junit.jupiter - junit-jupiter-engine - - - org.springframework - spring-test - - - de.flapdoodle.embed - de.flapdoodle.embed.mongo - ${version.embedded.mongo} - test - - - org.wiremock - wiremock-standalone - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - - - org.springframework - spring-web - - - com.fasterxml.jackson.core - jackson-core - ${version.jackson} - - - 
com.fasterxml.jackson.core - jackson-databind - ${version.jackson} - - - commons-io - commons-io - - - com.rabbitmq - amqp-client - ${version.rabbitmq} - - - org.redisson - redisson - ${version.redisson} - - - org.awaitility - awaitility - ${version.awaitability} - test - - - org.jetbrains - annotations - - - org.mockito - mockito-junit-jupiter - test - - - - - - - org.springframework - spring-framework-bom - ${version.spring} - pom - import - - - - diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DataEvolutionUtils.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DataEvolutionUtils.java deleted file mode 100644 index 3f9778f7f9..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DataEvolutionUtils.java +++ /dev/null @@ -1,433 +0,0 @@ -package eu.europeana.metis.core.dao; - -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.Pagination; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import 
eu.europeana.metis.utils.CommonStringValues; -import java.util.ArrayDeque; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.EnumSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; - -/** - * This class is a utility class that can answer questions related to the history and evolution of the data. - */ -public class DataEvolutionUtils { - - private static final Set HARVEST_PLUGIN_GROUP = Collections.unmodifiableSet( - EnumSet.of(ExecutablePluginType.OAIPMH_HARVEST, ExecutablePluginType.HTTP_HARVEST)); - private static final Set PROCESS_PLUGIN_GROUP = Collections.unmodifiableSet( - EnumSet.of(ExecutablePluginType.VALIDATION_EXTERNAL, ExecutablePluginType.TRANSFORMATION, - ExecutablePluginType.VALIDATION_INTERNAL, ExecutablePluginType.NORMALIZATION, ExecutablePluginType.ENRICHMENT, - ExecutablePluginType.MEDIA_PROCESS, ExecutablePluginType.LINK_CHECKING)); - private static final Set INDEX_PLUGIN_GROUP = Collections.unmodifiableSet( - EnumSet.of(ExecutablePluginType.PREVIEW, ExecutablePluginType.PUBLISH)); - private static final Set ALL_EXCEPT_LINK_GROUP = Collections.unmodifiableSet( - EnumSet.complementOf(EnumSet.of(ExecutablePluginType.LINK_CHECKING))); - - private final WorkflowExecutionDao workflowExecutionDao; - - /** - * Constructor. - * - * @param workflowExecutionDao the workflow execution dao - */ - public DataEvolutionUtils(WorkflowExecutionDao workflowExecutionDao) { - this.workflowExecutionDao = workflowExecutionDao; - } - - /** - *

- * Compute the predecessor plugin for a new plugin of the given plugin type. - *

- * - *

- * This method first computes the candidate type(s) this predecessor can have according to the execution rules, based on the - * given target plugin type (using {@link #getPredecessorTypes(ExecutablePluginType)}). If no predecessor is required for this - * type, this method returns null. - *

- * - *

- * Then the method returns the plugin within the given workflow execution that: - *

    - *
  1. Has one of these candidate types,
  2. - *
  3. Is the latest plugin of its kind, and
  4. - *
  5. Was executed successfully.
  6. - *
- *

- * - *

- * Note that in this context, as mentioned above, a plugin that executed with only errors is - * still counted as successful. Also, there is no requirement that the latest - * successful harvest plugin is an ancestor of the resulting plugin. - *

- * - * @param pluginType the type of the new {@link ExecutablePluginType} that is to be executed. - * @param workflowExecution The workflow execution in which to look. - * @return the {@link AbstractExecutablePlugin} that the pluginType execution can use as a source. Can be null in case the given - * type does not require a predecessor. - */ - public static ExecutablePlugin computePredecessorPlugin(ExecutablePluginType pluginType, WorkflowExecution workflowExecution) { - - // If the plugin type does not need a predecessor we are done. - final Set predecessorTypes = getPredecessorTypes(pluginType); - if (predecessorTypes.isEmpty()) { - return null; - } - - // Find the latest successful plugin of one of these types. If none found, throw exception. - final List candidates = - workflowExecution.getMetisPlugins().stream() - .filter(AbstractExecutablePlugin.class::isInstance).map(plugin -> (AbstractExecutablePlugin) plugin) - .filter(plugin -> predecessorTypes.contains(plugin.getPluginMetadata().getExecutablePluginType())) - .filter(plugin -> plugin.getPluginStatus() == PluginStatus.FINISHED).collect(Collectors.toList()); - if (!candidates.isEmpty()) { - return candidates.get(candidates.size() - 1); - } - - // If no successful plugin found, throw exception. - throw new IllegalArgumentException(); - } - - /** - *

- * Compute the predecessor plugin for a new plugin of the given plugin type. - *

- * - *

- * This method first computes the candidate type(s) this predecessor can have according to the execution rules, based on the - * given target plugin type (using {@link #getPredecessorTypes(ExecutablePluginType)}). If no predecessor is required for this - * type, this method returns null. Null is also returned for a predecessor {@link ExecutablePluginType#PUBLISH} that has at - * least one successful execution and pluginType {@link ExecutablePluginType#DEPUBLISH}, for which we do not need to perform all - * the checks since the predecessor may have been superseded by an invalid plugin execution. The enforced predecessor type - * provides a way to override the computed candidate types (if there are any): if provided, it is the only candidate type, - * meaning that any resulting predecessor plugin will be of this type. - *

- * - *

- * Then the method returns the plugin in the database that: - *

    - *
  1. Has one of these candidate types, or the enforced predecessor type if provided,
  2. - *
  3. Is the latest plugin of its kind,
  4. - *
  5. Was executed successfully (with at least one successful record), and
  6. - *
  7. Has the latest successful harvest plugin as its ancestor.
  8. - *
- *

- * - *

- * Note that in this context, as mentioned above, a plugin that executed with only errors is - * not counted as successful. Also, it is required that the latest successful - * harvest plugin is an ancestor of the resulting plugin. - *

- * - * @param pluginType the type of the new {@link ExecutablePluginType} that is to be executed. - * @param enforcedPredecessorType If not null, overrides the predecessor type of the plugin. - * @param datasetId the dataset ID of the new plugin's dataset. - * @return the {@link ExecutablePlugin} that the pluginType execution will use as a source. Can be null in case the given type - * does not require a predecessor. - * @throws PluginExecutionNotAllowed In case a valid predecessor is required, but not found. - */ - public PluginWithExecutionId computePredecessorPlugin(ExecutablePluginType pluginType, - ExecutablePluginType enforcedPredecessorType, String datasetId) throws PluginExecutionNotAllowed { - - final PluginWithExecutionId predecessorPlugin; - final Set defaultPredecessorTypes = getPredecessorTypes(pluginType); - - if (pluginType == ExecutablePluginType.DEPUBLISH) { - // If a DEPUBLISH Operation is requested we don't link to the predecessor plugin. However, - // make sure there at least one successful exists (possibly invalidated by later reindex). - final PluginWithExecutionId successfulPublish = workflowExecutionDao.getLatestSuccessfulExecutablePlugin( - datasetId, EnumSet.of(ExecutablePluginType.PUBLISH), false); - final boolean hasAtLeastOneSuccessfulPlugin = Optional.ofNullable(successfulPublish) - .filter(DataEvolutionUtils::pluginHasSuccessfulRecords).isPresent(); - if (!hasAtLeastOneSuccessfulPlugin) { - throw new PluginExecutionNotAllowed(CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED); - } - predecessorPlugin = null; - } else if (defaultPredecessorTypes.isEmpty()) { - // If the plugin type does not need a predecessor (even the enforced one) we are done. - predecessorPlugin = null; - } else { - - // Determine which predecessor plugin types are permissible (list is never empty). 
- final Set predecessorTypes = Optional.ofNullable(enforcedPredecessorType) - .>map(EnumSet::of) - .orElse(defaultPredecessorTypes); - - // Find the latest successful harvest to compare with. If none exist, throw exception. - final PluginWithExecutionId latestHarvest = - Optional.ofNullable(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, HARVEST_PLUGIN_GROUP, true)) - .orElseThrow(() -> new PluginExecutionNotAllowed(CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED)); - - // Find the latest successful plugin of each type and filter on existence of successful records. - final Stream> latestSuccessfulPlugins = - predecessorTypes.stream() - .map(Collections::singleton) - .map(type -> workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, type, true)) - .filter(Objects::nonNull) - .filter(DataEvolutionUtils::pluginHasSuccessfulRecords); - - // Sort on finished state, so that the root check occurs as little as possible. - final Stream> sortedSuccessfulPlugins = latestSuccessfulPlugins.sorted( - Comparator.comparing( - plugin -> Optional.ofNullable(plugin.getPlugin().getFinishedDate()).orElseGet(() -> new Date(Long.MIN_VALUE)), - Comparator.reverseOrder())); - - // Find the first plugin that satisfies the root check. If none found, throw exception. 
- final Predicate> rootCheck = plugin -> getRootAncestor(plugin).equals( - latestHarvest); - predecessorPlugin = sortedSuccessfulPlugins.filter(rootCheck).findFirst() - .orElseThrow(() -> new PluginExecutionNotAllowed( - CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED)); - } - return predecessorPlugin; - } - - private static Boolean pluginHasSuccessfulRecords(PluginWithExecutionId plugin) { - final ExecutionProgress executionProgress = plugin.getPlugin().getExecutionProgress(); - return Optional.ofNullable(executionProgress) - .map(progress -> progress.getProcessedRecords() > progress.getErrors() || progress.getDeletedRecords() > 0) - .orElse(Boolean.FALSE); - } - - /** - * Obtains the root ancestor plugin of the given plugin. This returns the one ancestor plugin that does not itself have a - * predecessor. - * - * @param plugin The plugin for which to find the root ancestor. - * @return The root ancestor. Is not null. - */ - public PluginWithExecutionId getRootAncestor(PluginWithExecutionId plugin) { - final WorkflowExecution execution = workflowExecutionDao.getById(plugin.getExecutionId()); - final List> evolution = compileVersionEvolution(plugin.getPlugin(), execution); - if (evolution.isEmpty()) { - return new PluginWithExecutionId<>(plugin.getExecutionId(), plugin.getPlugin()); - } - return new PluginWithExecutionId<>(evolution.get(0).getRight(), evolution.get(0).getLeft()); - } - - /** - * This method determines what plugin types a plugin of the given type can be based on. This means that the given type can only - * occur after one of the returned base types. - * - * @param pluginType The plugin type for which to return the base types. - * @return The base types of the given plugin type: those plugin types that a plugin of the given type can be based on. Cannot - * be null, but can be the empty set in case the plugin type requires no predecessor. 
- */ - public static Set getPredecessorTypes(ExecutablePluginType pluginType) { - return switch (pluginType) { - case VALIDATION_EXTERNAL -> HARVEST_PLUGIN_GROUP; - case TRANSFORMATION -> EnumSet.of(ExecutablePluginType.VALIDATION_EXTERNAL); - case VALIDATION_INTERNAL -> EnumSet.of(ExecutablePluginType.TRANSFORMATION); - case NORMALIZATION -> EnumSet.of(ExecutablePluginType.VALIDATION_INTERNAL); - case ENRICHMENT -> EnumSet.of(ExecutablePluginType.NORMALIZATION); - case MEDIA_PROCESS -> EnumSet.of(ExecutablePluginType.ENRICHMENT); - case PREVIEW -> EnumSet.of(ExecutablePluginType.MEDIA_PROCESS); - case PUBLISH -> EnumSet.of(ExecutablePluginType.PREVIEW); - case LINK_CHECKING -> ALL_EXCEPT_LINK_GROUP; - case DEPUBLISH, HTTP_HARVEST, OAIPMH_HARVEST -> Collections.emptySet(); - }; - } - - /** - * Get the evolution of the records from when they were first imported until (and excluding) the target version. - * - * @param targetPlugin The target for compiling the evolution: the result will lead to, but not include, this plugin. - * @param targetPluginExecution The execution in which this target plugin may be found. - * @return The evolution. - */ - public List> compileVersionEvolution(MetisPlugin targetPlugin, - WorkflowExecution targetPluginExecution) { - - // Loop backwards to find the plugin. Don't add the first plugin to the result list. - Pair currentExecutionAndPlugin = new ImmutablePair<>(targetPlugin, targetPluginExecution); - final ArrayDeque> evolutionSteps = new ArrayDeque<>(); - while (true) { - - // Move to the previous execution: stop when we have none or it is not executable. - currentExecutionAndPlugin = getPreviousExecutionAndPlugin(currentExecutionAndPlugin.getLeft(), - currentExecutionAndPlugin.getRight().getDatasetId()); - if (currentExecutionAndPlugin == null || !(currentExecutionAndPlugin.getLeft() instanceof ExecutablePlugin)) { - break; - } - - // Add step to the beginning of the list. 
- evolutionSteps.addFirst( - new ImmutablePair<>((ExecutablePlugin) currentExecutionAndPlugin.getLeft(), currentExecutionAndPlugin.getRight())); - } - - // Done - return new ArrayList<>(evolutionSteps); - } - - /** - * Returns the plugin that is the predecessor from the given plugin - * - * @param plugin The given plugin to find its predecessor, if any - * @param datasetId The dataset id from which we want to find the predecessor plugin - * @return The predecessor plugin from the give one, otherwise null if there is none - */ - public Pair getPreviousExecutionAndPlugin(MetisPlugin plugin, String datasetId) { - - // Check whether we are at the end of the chain. - final ExecutedMetisPluginId previousPluginId = ExecutedMetisPluginId.forPredecessor(plugin); - if (previousPluginId == null) { - return null; - } - - // Obtain the previous execution and plugin. - final WorkflowExecution previousExecution = workflowExecutionDao.getByTaskExecution(previousPluginId, datasetId); - final AbstractMetisPlugin previousPlugin = previousExecution == null ? null - : previousExecution.getMetisPluginWithType(previousPluginId.getPluginType()).orElse(null); - if (previousExecution == null || previousPlugin == null) { - return null; - } - - // Done - return new ImmutablePair<>(previousPlugin, previousExecution); - } - - List> getPublishOperationsSortedInversely(String datasetId) { - - // Get all workflows with finished publish plugins (theoretically we can't quite rely on order). - final Pagination pagination = workflowExecutionDao.createPagination(0, null, true); - final ResultList executionsWithPublishOperations = workflowExecutionDao.getWorkflowExecutionsOverview( - Set.of(datasetId), Set.of(PluginStatus.FINISHED), Set.of(PluginType.PUBLISH), null, null, pagination); - - // Extract all (finished) publish plugins inversely sorted by started date (most recent first). 
- final List> publishOperations = new ArrayList<>(); - executionsWithPublishOperations.getResults().stream().map(ExecutionDatasetPair::getExecution).forEach( - execution -> execution.getMetisPlugins().stream().filter(IndexToPublishPlugin.class::isInstance) - .map(IndexToPublishPlugin.class::cast) - .map(plugin -> new PluginWithExecutionId<>(execution.getId().toString(), plugin)) - .forEach(publishOperations::add)); - final Comparator> comparator = Comparator.comparing( - pair -> pair.getPlugin().getStartedDate()); - publishOperations.sort(comparator.reversed()); - - // Done - return publishOperations; - } - - /** - *

- * This method returns a sequence of published harvest increments. This is a list of successive published harvests that must - * start with the most recent full published harvest and then includes all incremental published harvests (if any) that came - * after that. - *

- *

- * Note that a published harvest is a harvest that resulted in a index to publish (i.e. any harvest that is the root predecessor - * of an index to publish). - *

- * - * @param datasetId The dataset ID for which to obtain the chain. - * @return The chain, in the form of plugin-execution pairs that are ordered chronologically. Is never null, but can be empty if - * no such chain exists (i.e. the dataset does not have a published harvest or we find an index after the last full harvest that - * is invalid or did somehow not originate from a harvest). - */ - public List> getPublishedHarvestIncrements(String datasetId) { - - // Get all publish operations sorted inversely - final var allPublishOperations = getPublishOperationsSortedInversely(datasetId); - - // Compile a list of all associated harvests (that led to one of these publish operations). - // Note: we assume that workflows don't cross each other (i.e. an earlier publish cannot have a - // later harvest). We stop when we find a full harvest (the latest full harvest). - boolean fullHarvestFound = false; - final Map> resultHarvests = new LinkedHashMap<>( - allPublishOperations.size()); - for (PluginWithExecutionId publishOperation : allPublishOperations) { - - // If the publish is not available, we have detected an anomaly. We are done. - if (MetisPlugin.getDataStatus(publishOperation.getPlugin()) != DataStatus.VALID - && MetisPlugin.getDataStatus(publishOperation.getPlugin()) != DataStatus.DEPRECATED) { - return Collections.emptyList(); - } - - // Get the root harvest and add it to the map. - final PluginWithExecutionId rootHarvest = getRootAncestor(publishOperation); - - // If the root harvest is not a harvest, we have detected an anomaly. We are done. - if (!HARVEST_PLUGIN_GROUP.contains(rootHarvest.getPlugin().getPluginMetadata().getExecutablePluginType())) { - return Collections.emptyList(); - } - - // Add the root harvest to the result (overwrite if already there). - resultHarvests.put(ExecutedMetisPluginId.forPlugin(rootHarvest.getPlugin()), rootHarvest); - - // If the root harvest is a full harvest, we are done. 
- fullHarvestFound = !(isIncrementalHarvest(rootHarvest)); - if (fullHarvestFound) { - break; - } - } - - // Done. Sort and return the harvests in the right order (just reversing the result). - if (!fullHarvestFound) { - return Collections.emptyList(); - } - final List> result = new ArrayList<>(resultHarvests.values()); - Collections.reverse(result); - return result; - } - - private static boolean isIncrementalHarvest(PluginWithExecutionId rootHarvest) { - return (rootHarvest.getPlugin().getPluginMetadata() instanceof AbstractHarvestPluginMetadata abstractHarvestPluginMetadata) - && abstractHarvestPluginMetadata.isIncrementalHarvest(); - } - - /** - * @return The plugin types that are of the 'harvesting' kind: they can occur at the beginning of workflows and don't need - * another plugin type as base. - */ - public static Set getHarvestPluginGroup() { - return HARVEST_PLUGIN_GROUP; - } - - /** - * @return The plugin types that are of the 'processing' kind. - */ - public static Set getProcessPluginGroup() { - return PROCESS_PLUGIN_GROUP; - } - - /** - * @return The plugin types that are of the 'indexing' kind. - */ - public static Set getIndexPluginGroup() { - return INDEX_PLUGIN_GROUP; - } - - /** - * @return The plugin types that are of the 'link checking' kind. 
- */ - public static Set getAllExceptLinkGroup() { - return ALL_EXCEPT_LINK_GROUP; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java deleted file mode 100644 index 5a3851c275..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetDao.java +++ /dev/null @@ -1,446 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_ID; -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_NAME; -import static eu.europeana.metis.core.common.DaoFieldNames.DATA_PROVIDER; -import static eu.europeana.metis.core.common.DaoFieldNames.ID; -import static eu.europeana.metis.core.common.DaoFieldNames.PROVIDER; -import static eu.europeana.metis.mongo.utils.MorphiaUtils.getListOfQueryRetryable; -import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptions; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; - -import dev.morphia.UpdateOptions; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.Sort; -import dev.morphia.query.filters.Filter; -import dev.morphia.query.filters.Filters; -import dev.morphia.query.updates.UpdateOperator; -import dev.morphia.query.updates.UpdateOperators; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.service.mcs.exception.DataSetAlreadyExistsException; -import eu.europeana.cloud.service.mcs.exception.MCSException; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetIdSequence; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.exception.ExternalTaskException; -import java.util.ArrayList; -import java.util.List; -import 
java.util.Optional; -import java.util.UUID; -import java.util.function.UnaryOperator; -import java.util.regex.Pattern; -import org.apache.commons.lang3.StringUtils; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Repository; - -/** - * Dataset Access Object for datasets using Mongo. It also contains the {@link DataSetServiceClient} which is used to access - * functionality of the ECloud datasets. - */ -@Repository -public class DatasetDao implements MetisDao { - - private static final Logger LOGGER = LoggerFactory.getLogger(DatasetDao.class); - private int datasetsPerRequest = RequestLimits.DATASETS_PER_REQUEST.getLimit(); - - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - private final DataSetServiceClient ecloudDataSetServiceClient; - private String ecloudProvider; // Use getter and setter for this field! - - /** - * Constructs the DAO - *

Initialize {@link #ecloudProvider} using the setter class. - * Use setter for {@link #setDatasetsPerRequest(int)} to overwrite the default value

- * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} used to access Mongo - * @param ecloudDataSetServiceClient {@link DataSetServiceClient} to access the ecloud dataset functionality - */ - @Autowired - public DatasetDao(MorphiaDatastoreProvider morphiaDatastoreProvider, - DataSetServiceClient ecloudDataSetServiceClient) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - this.ecloudDataSetServiceClient = ecloudDataSetServiceClient; - } - - /** - * Create a dataset in the database - * - * @param dataset {@link Dataset} to be created - * @return the {@link ObjectId} as String - */ - @Override - public Dataset create(Dataset dataset) { - final ObjectId objectId = Optional.ofNullable(dataset.getId()).orElseGet(ObjectId::new); - dataset.setId(objectId); - Dataset datasetSaved = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(dataset)); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "Dataset with datasetId: '{}', datasetName: '{}' and OrganizationId: '{}' created in Mongo", - CRLF_PATTERN.matcher(dataset.getDatasetId()).replaceAll(""), - CRLF_PATTERN.matcher(dataset.getDatasetName()).replaceAll(""), - CRLF_PATTERN.matcher(dataset.getOrganizationId()).replaceAll("")); - } - return datasetSaved; - } - - /** - * Update a dataset in the database - * - * @param dataset {@link Dataset} to be updated - * @return the {@link ObjectId} as String - */ - @Override - public String update(Dataset dataset) { - Dataset datasetSaved = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(dataset)); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "Dataset with datasetId: '{}', datasetName: '{}' and OrganizationId: '{}' updated in Mongo", - CRLF_PATTERN.matcher(dataset.getDatasetId()).replaceAll(""), - CRLF_PATTERN.matcher(dataset.getDatasetName()).replaceAll(""), - CRLF_PATTERN.matcher(dataset.getOrganizationId()).replaceAll("")); - } - return 
datasetSaved == null ? null : datasetSaved.getId().toString(); - } - - /** - * Get a dataset by {@link ObjectId} String. - * - * @param id the {@link ObjectId} String to search with - * @return {@link Dataset} - */ - @Override - public Dataset getById(String id) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq(ID.getFieldName(), new ObjectId(id))).first()); - } - - /** - * Delete a dataset using its datasetId. - * - * @param dataset {@link Dataset} containing the datasetId to be used for delete - * @return always true - */ - @Override - public boolean delete(Dataset dataset) { - retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), dataset.getDatasetId())).delete()); - LOGGER.debug( - "Dataset with datasetId: '{}', datasetName: '{}' and OrganizationId: '{}' deleted in Mongo", - dataset.getDatasetId(), dataset.getDatasetName(), dataset.getOrganizationId()); - return true; - } - - /** - * Delete a dataset using a datasetId. 
- * - * @param datasetId the identifier used to delete a dataset from the database - * @return always true - */ - public boolean deleteByDatasetId(String datasetId) { - Dataset dataset = new Dataset(); - dataset.setDatasetId(datasetId); - return delete(dataset); - } - - /** - * Get a dataset using a datasetName - * - * @param datasetName the String to search for - * @return {@link Dataset} or null - */ - public Dataset getDatasetByDatasetName(String datasetName) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq(DATASET_NAME.getFieldName(), datasetName)).first()); - } - - /** - * Get a dataset using a datasetId - * - * @param datasetId the String to search for - * @return {@link Dataset} or null - */ - public Dataset getDatasetByDatasetId(String datasetId) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)).first()); - } - - /** - * Get a dataset using an organizationId and datasetName - * - * @param organizationId the organizationId - * @param datasetName the datasetName - * @return {@link Dataset} or null - */ - public Dataset getDatasetByOrganizationIdAndDatasetName(String organizationId, - String datasetName) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq("organizationId", organizationId)) - .filter(Filters.eq(DATASET_NAME.getFieldName(), datasetName))).first(); - } - - /** - * Get all datasets using the provider field. 
- * - * @param provider the provider string used to find the datasets - * @param nextPage the nextPage positive number - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByProvider(String provider, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("provider", provider)); - final FindOptions findOptions = new FindOptions().skip(nextPage * getDatasetsPerRequest()) - .limit(getDatasetsPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Get all datasets using the intermediateProvider field. - * - * @param intermediateProvider the intermediateProvider string used to find the datasets - * @param nextPage the nextPage positive number - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByIntermediateProvider(String intermediateProvider, - int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("intermediateProvider", intermediateProvider)); - final FindOptions findOptions = new FindOptions().skip(nextPage * getDatasetsPerRequest()) - .limit(getDatasetsPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Get all datasets using the dataProvider field. - * - * @param dataProvider the dataProvider string used to find the datasets - * @param nextPage the nextPage positive number - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByDataProvider(String dataProvider, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("dataProvider", dataProvider)); - final FindOptions findOptions = new FindOptions().skip(nextPage * getDatasetsPerRequest()) - .limit(getDatasetsPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Get all datasets using the organizationName field. 
- * - * @param organizationName the organizationName string used to find the datasets - * @param nextPage the nextPage positive number - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByOrganizationName(String organizationName, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("organizationName", organizationName)); - final FindOptions findOptions = new FindOptions().skip(nextPage * getDatasetsPerRequest()) - .limit(getDatasetsPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Get all datasets using the organizationId field, using pagination. - * - * @param organizationId the organizationId string used to find the datasets - * @param nextPage the nextPage positive number - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByOrganizationId(String organizationId, int nextPage) { - return getAllDatasetsByOrganizationId(organizationId, - options -> options.skip(nextPage * getDatasetsPerRequest()).limit(getDatasetsPerRequest())); - } - - /** - * Get all datasets using the organizationId field. - * - * @param organizationId the organizationId string used to find the datasets - * @return {@link List} of {@link Dataset} - */ - public List getAllDatasetsByOrganizationId(String organizationId) { - return getAllDatasetsByOrganizationId(organizationId, UnaryOperator.identity()); - } - - private List getAllDatasetsByOrganizationId(String organizationId, - UnaryOperator options) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("organizationId", organizationId)); - return getListOfQueryRetryable(query, options.apply(new FindOptions())); - } - - /** - * Find the next in sequence identifier that can be used as a datasetId for a {@link Dataset}. - *

It will bypass any existing datasetId's in the system and will give the first available - * after that, otherwise it's simply an incremental identifier

- * - * @return the available identifier to be used further for a creation of a {@link Dataset} - */ - public int findNextInSequenceDatasetId() { - DatasetIdSequence datasetIdSequence = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DatasetIdSequence.class).first()); - Dataset dataset; - do { - datasetIdSequence.setSequence(datasetIdSequence.getSequence() + 1); - dataset = this.getDatasetByDatasetId(Integer.toString(datasetIdSequence.getSequence())); - } while (dataset != null); - Query updateQuery = morphiaDatastoreProvider.getDatastore() - .find(DatasetIdSequence.class) - .filter( - Filters.eq(ID.getFieldName(), datasetIdSequence.getId())); - final UpdateOperator updateOperator = UpdateOperators - .set("sequence", datasetIdSequence.getSequence()); - - retryableExternalRequestForNetworkExceptions( - () -> updateQuery.update(new UpdateOptions(), updateOperator)); - return datasetIdSequence.getSequence(); - } - - /** - * Get a list of datasets to redirect from - * - * @param datasetIdToRedirectFrom the dataset ids to redirect from - * @return the list of dataset to redirect from - */ - public List getAllDatasetsByDatasetIdsToRedirectFrom(String datasetIdToRedirectFrom) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - query.filter(Filters.eq("datasetIdsToRedirectFrom", datasetIdToRedirectFrom)); - return getListOfQueryRetryable(query, new FindOptions()); - } - - public int getDatasetsPerRequest() { - synchronized (this) { - return datasetsPerRequest; - } - } - - public void setDatasetsPerRequest(int datasetsPerRequest) { - synchronized (this) { - this.datasetsPerRequest = datasetsPerRequest; - } - } - - /** - * Checks if the ecloud dataset identifier already exists in ECloud and if it does not, it will try to create a new one and add - * the identifier inside the metis Dataset object and store. - *

This is an exception method that uses the {@link DataSetServiceClient} to communicate with - * the external dataset resource in ECloud

- * - * @param dataset the Dataset object to check - * @return the ECloud dataset identifier - * @throws ExternalTaskException if an error occurred during the creation of the dataset identifier on ECloud - */ - public String checkAndCreateDatasetInEcloud(Dataset dataset) throws ExternalTaskException { - if (StringUtils.isEmpty(dataset.getEcloudDatasetId()) || dataset.getEcloudDatasetId() - .startsWith("NOT_CREATED_YET")) { - final String uuid = UUID.randomUUID().toString(); - dataset.setEcloudDatasetId(uuid); - try { - ecloudDataSetServiceClient - .createDataSet(getEcloudProvider(), uuid, "Metis generated dataset"); - update(dataset); - } catch (DataSetAlreadyExistsException e) { - throw new ExternalTaskException("Dataset already exist, not recreating", e); - } catch (MCSException e) { - throw new ExternalTaskException("An error has occurred during ecloud dataset creation.", e); - } - } else { - LOGGER - .info("Dataset with datasetId {} already has a dataset initialized in Ecloud with id {}", - dataset.getDatasetId(), dataset.getEcloudDatasetId()); - } - return dataset.getEcloudDatasetId(); - } - - /** - * Get the list of of matching DatasetSearch using dataset - * - * @param datasetIdWords a list of words to be used for datasetId search, that field is searched as a "starts with" operation - * @param words a list of words to be used for datasetName, provider and dataProvider search. Those words are considered as AND - * operation for each individual field. 
- * @param nextPage the nextPage number, must be positive - * @return a list with the datasets found - */ - public List searchDatasetsBasedOnSearchString(List datasetIdWords, - List words, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore().find(Dataset.class); - final List datasetIdFilters = new ArrayList<>(datasetIdWords.size()); - final List datasetNameFilters = new ArrayList<>(words.size()); - final List providerIdFilters = new ArrayList<>(words.size()); - final List dataProviderIdFilters = new ArrayList<>(words.size()); - - //Search on datasetId, only words that start with a numeric character - for (String datasetIdWord : datasetIdWords) { - datasetIdFilters.add(Filters.regex(DATASET_ID.getFieldName()) - .pattern(Pattern.compile("^" + Pattern.quote(datasetIdWord)))); - } - - //Search on provider and dataProvider - for (String word : words) { - datasetNameFilters.add(Filters.regex(DATASET_NAME.getFieldName()) - .pattern(Pattern.compile(word, Pattern.CASE_INSENSITIVE))); - providerIdFilters.add(Filters.regex(PROVIDER.getFieldName()) - .pattern(Pattern.compile(word, Pattern.CASE_INSENSITIVE))); - dataProviderIdFilters.add(Filters.regex(DATA_PROVIDER.getFieldName()) - .pattern(Pattern.compile(word, Pattern.CASE_INSENSITIVE))); - } - final List filterGroups = new ArrayList<>(); - if (!datasetIdFilters.isEmpty()) { - filterGroups.add(Filters.or(datasetIdFilters.toArray(Filter[]::new))); - } - if (!datasetNameFilters.isEmpty()) { - filterGroups.add(Filters.or(datasetNameFilters.toArray(Filter[]::new))); - } - if (!providerIdFilters.isEmpty()) { - filterGroups.add(Filters.or(providerIdFilters.toArray(Filter[]::new))); - } - if (!dataProviderIdFilters.isEmpty()) { - filterGroups.add(Filters.or(dataProviderIdFilters.toArray(Filter[]::new))); - } - - if (!filterGroups.isEmpty()) { - query.filter(Filters.or(filterGroups.toArray(Filter[]::new))); - } - - final FindOptions findOptions = new FindOptions() - 
.sort(Sort.ascending(DATASET_ID.getFieldName())).skip(nextPage * getDatasetsPerRequest()) - .limit(getDatasetsPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Check if a dataset exists using a datasetName. - * - * @param datasetName the datasetName - * @return true if exist or false if it does not exist - */ - boolean existsDatasetByDatasetName(String datasetName) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Dataset.class) - .filter(Filters.eq(DATASET_NAME.getFieldName(), datasetName)) - .first(new FindOptions().projection().include(ID.getFieldName()))) != null; - } - - public void setEcloudProvider(String ecloudProvider) { - synchronized (this) { - this.ecloudProvider = ecloudProvider; - } - } - - private String getEcloudProvider() { - synchronized (this) { - return this.ecloudProvider; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java deleted file mode 100644 index 7da54b6544..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DatasetXsltDao.java +++ /dev/null @@ -1,120 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_ID; -import static eu.europeana.metis.core.common.DaoFieldNames.ID; -import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptions; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; - -import com.mongodb.client.result.DeleteResult; -import dev.morphia.DeleteOptions; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.Sort; -import dev.morphia.query.filters.Filters; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; 
-import java.util.Optional; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Repository; - -/** - * Dataset Access Object for xslts using Mongo - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-27 - */ -@Repository -public class DatasetXsltDao implements MetisDao { - - private static final Logger LOGGER = LoggerFactory.getLogger(DatasetXsltDao.class); - - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - - /** - * Constructs the DAO - * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} used to access Mongo - */ - @Autowired - public DatasetXsltDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - } - - @Override - public DatasetXslt create(DatasetXslt datasetXslt) { - final ObjectId objectId = Optional.ofNullable(datasetXslt.getId()).orElseGet(ObjectId::new); - datasetXslt.setId(objectId); - DatasetXslt datasetSaved = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(datasetXslt)); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("DatasetXslt for datasetId: '{}'created in Mongo", - CRLF_PATTERN.matcher(datasetXslt.getDatasetId()).replaceAll("")); - } - return datasetSaved; - } - - @Override - public String update(DatasetXslt datasetXslt) { - DatasetXslt datasetXsltSaved = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(datasetXslt)); - LOGGER.debug("DatasetXslt for datasetId: '{}' updated in Mongo", datasetXslt.getDatasetId()); - return datasetXsltSaved == null ? 
null : datasetXsltSaved.getId().toString(); - } - - @Override - public DatasetXslt getById(String id) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DatasetXslt.class) - .filter(Filters.eq(ID.getFieldName(), new ObjectId(id))).first()); - } - - @Override - public boolean delete(DatasetXslt datasetXslt) { - retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DatasetXslt.class) - .filter(Filters.eq(ID.getFieldName(), datasetXslt.getId())).delete()); - LOGGER.debug("DatasetXslt with objectId: '{}', datasetId: '{}'deleted in Mongo", - datasetXslt.getId(), datasetXslt.getDatasetId()); - return true; - } - - /** - * Delete All Xslts but using a dataset identifier. - * - * @param datasetId the dataset identifier - * @return true if something was found and deleted or false - */ - public boolean deleteAllByDatasetId(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore().find(DatasetXslt.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - DeleteResult deleteResult = retryableExternalRequestForNetworkExceptions( - () -> query.delete(new DeleteOptions().multi(true))); - LOGGER.debug("Xslts with datasetId: {}, deleted from Mongo", datasetId); - return (deleteResult == null ? 0 : deleteResult.getDeletedCount()) >= 1; - } - - /** - * Fet latest stored xslt using a dataset identifier. - * - * @param datasetId the dataset identifier - * @return the {@link DatasetXslt} object - */ - DatasetXslt getLatestXsltForDatasetId(String datasetId) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DatasetXslt.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)) - .first(new FindOptions().sort(Sort.descending("createdDate")))); - } - - /** - * Fet latest stored default xslt. 
- * - * @return the {@link DatasetXslt} object - */ - public DatasetXslt getLatestDefaultXslt() { - return getLatestXsltForDatasetId(DatasetXslt.DEFAULT_DATASET_ID); - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java deleted file mode 100644 index 17070442da..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/DepublishRecordIdDao.java +++ /dev/null @@ -1,407 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.mongo.utils.MorphiaUtils.getListOfQueryRetryable; -import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptions; - -import dev.morphia.DeleteOptions; -import dev.morphia.UpdateOptions; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.filters.Filters; -import dev.morphia.query.updates.UpdateOperator; -import dev.morphia.query.updates.UpdateOperators; -import eu.europeana.metis.core.dataset.DepublishRecordId; -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.rest.DepublishRecordIdView; -import eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.exception.BadContentException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import org.apache.commons.lang3.StringUtils; -import org.bson.types.ObjectId; -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.lang.Nullable; -import org.springframework.stereotype.Repository; -import org.springframework.util.CollectionUtils; - -/** - * DAO for {@link DepublishRecordId} objects. - */ -@Repository -public class DepublishRecordIdDao { - - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - private final long maxDepublishRecordIdsPerDataset; - private final int pageSize; - - /** - * Constructor. - * - * @param morphiaDatastoreProvider The datastore provider. - * @param maxDepublishRecordIdsPerDataset The maximum number of records we allow per dataset. - */ - @Autowired - public DepublishRecordIdDao(MorphiaDatastoreProvider morphiaDatastoreProvider, - @Value("${max.depublish.record.ids.per.dataset}") long maxDepublishRecordIdsPerDataset) { - this(morphiaDatastoreProvider, maxDepublishRecordIdsPerDataset, - RequestLimits.DEPUBLISHED_RECORDS_PER_REQUEST.getLimit()); - } - - /** - * Constructor allowing setting the page size - * - * @param morphiaDatastoreProvider The datastore provider. - * @param maxDepublishRecordIdsPerDataset The maximum number of records we allow per dataset. - * @param pageSize The page size for list requests. - */ - DepublishRecordIdDao(MorphiaDatastoreProvider morphiaDatastoreProvider, - long maxDepublishRecordIdsPerDataset, int pageSize) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - this.maxDepublishRecordIdsPerDataset = maxDepublishRecordIdsPerDataset; - this.pageSize = pageSize; - } - - private Set getNonExistingRecordIds(String datasetId, Set recordIds) { - return retryableExternalRequestForNetworkExceptions(() -> { - - // Create query for existing records in list. Only return record IDs. 
- final Query query = morphiaDatastoreProvider.getDatastore() - .find(DepublishRecordId.class); - query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)); - query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIds)); - - // Execute query and find existing record IDs. - final FindOptions findOptions = new FindOptions(); - findOptions.projection().include(DepublishRecordId.RECORD_ID_FIELD); - findOptions.projection().exclude(DepublishRecordId.ID_FIELD); - final Set existing; - existing = getListOfQueryRetryable(query, findOptions).stream() - .map(DepublishRecordId::getRecordId).collect(Collectors.toSet()); - - // Return the other ones: the record IDs not found in the database. - return recordIds.stream().filter(recordId -> !existing.contains(recordId)) - .collect(Collectors.toSet()); - }); - } - - /** - * Add depublished records to persistence. This method checks whether the depublished record already exists, and if so, doesn't - * add it again. All new records (but not the existing ones) will have the default depublication status - * ({@link DepublicationStatus#PENDING_DEPUBLICATION}) and no depublication date. - * - * @param datasetId The dataset to which the records belong. - * @param candidateRecordIds The IDs of the depublish record ids to add. - * @return How many of the passed records were in fact added. This counter is not thread-safe: if multiple threads try to add - * the same records, their combined counters may overrepresent the number of records that were actually added. - * @throws BadContentException In case adding the records would violate the maximum number of depublished records that each - * dataset can have. - */ - public int createRecordIdsToBeDepublished(String datasetId, Set candidateRecordIds) - throws BadContentException { - - // Check list size: if this is too large we can throw exception regardless of what's in the database. 
- if (candidateRecordIds.size() > maxDepublishRecordIdsPerDataset) { - throw new BadContentException( - "Can't add these records: this would violate the maximum number of records per dataset."); - } - - // Get the nonexisting records: those we actually add. - final Set recordIdsToAdd = getNonExistingRecordIds(datasetId, candidateRecordIds); - - // Count: determine whether we are not above our maximum. - final long existingCount = countDepublishRecordIdsForDataset(datasetId); - if (existingCount + recordIdsToAdd.size() > maxDepublishRecordIdsPerDataset) { - throw new BadContentException( - "Can't add these records: this would violate the maximum number of records per dataset."); - } - - // Add the records and we're done. - addRecords(recordIdsToAdd, datasetId, DepublicationStatus.PENDING_DEPUBLICATION, null); - return recordIdsToAdd.size(); - } - - void addRecords(Set recordIdsToAdd, String datasetId, - DepublicationStatus depublicationStatus, Instant depublicationDate) { - final List objectsToAdd = recordIdsToAdd.stream().map(recordId -> { - final DepublishRecordId depublishRecordId = new DepublishRecordId(); - depublishRecordId.setId(new ObjectId()); - depublishRecordId.setDatasetId(datasetId); - depublishRecordId.setRecordId(recordId); - depublishRecordId.setDepublicationStatus(depublicationStatus); - depublishRecordId.setDepublicationDate(depublicationDate); - return depublishRecordId; - }).toList(); - retryableExternalRequestForNetworkExceptions(() -> { - morphiaDatastoreProvider.getDatastore().save(objectsToAdd); - return Optional.empty(); - }); - } - - /** - * Deletes a list of record ids from the database. Only record ids that are in a - * {@link DepublicationStatus#PENDING_DEPUBLICATION} state will be removed. - * - * @param datasetId The dataset to which the depublish record ids belong. - * @param recordIds The depublish record ids to be removed - * @return The number or record ids that were removed. 
- * @throws BadContentException In case adding the records would violate the maximum number of depublished records that each - * dataset can have. - */ - public Long deletePendingRecordIds(String datasetId, Set recordIds) - throws BadContentException { - - // Check list size: if this is too large we can throw exception regardless of what's in the database. - if (recordIds.size() > maxDepublishRecordIdsPerDataset) { - throw new BadContentException( - "Can't remove these records: this would violate the maximum number of records per dataset."); - } - - final Query query = morphiaDatastoreProvider.getDatastore() - .find(DepublishRecordId.class); - query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)); - query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIds)); - query.filter(Filters.eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, - DepublicationStatus.PENDING_DEPUBLICATION)); - - return deleteRecords(query); - } - - /** - * Counts how many records we have for a given dataset. - * - * @param datasetId The ID of the dataset to count for. - * @return The number of records for the given dataset. - */ - long countDepublishRecordIdsForDataset(String datasetId) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DepublishRecordId.class) - .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).count()); - } - - /** - * Counts how many records we have for a given dataset that have the status {@link DepublicationStatus#DEPUBLISHED}. - * - * @param datasetId The ID of the dataset to count for. - * @return The number of records. 
- */ - public long countSuccessfullyDepublishedRecordIdsForDataset(String datasetId) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(DepublishRecordId.class) - .filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)).filter(Filters - .eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, DepublicationStatus.DEPUBLISHED)) - .count()); - } - - /** - * Get a list of depublish records for a given dataset. - *

Ids are retrieved regardless of their status

- * - * @param datasetId The dataset for which to retrieve the records. Cannot be null. - * @param page The page (batch) number, starting at 0. Cannot be null. - * @param sortField The sorting field. Cannot be null. - * @param sortDirection The sorting direction. Cannot be null. - * @param searchQuery Search query for the record ID. Can be null. - * @return A (possibly empty) list of depublish record ids. - */ - public List getDepublishRecordIds(String datasetId, int page, - DepublishRecordIdSortField sortField, SortDirection sortDirection, String searchQuery) { - final Query query = prepareQueryForDepublishRecordIds(datasetId, null, - searchQuery); - - // Compute pagination - final FindOptions findOptions = new FindOptions() - .sort(sortDirection.createSort(sortField.getDatabaseField())).skip(page * pageSize) - .limit(pageSize); - - // Execute query with correct pagination - final List result = getListOfQueryRetryable(query, findOptions); - - // Convert result to right object. - return result.stream().map(DepublishRecordIdView::new).toList(); - } - - /** - * Get all depublished records for a given dataset. - *

This method is to be used with caution since it doesn't have a limit on the returned items. - * It is mainly used to minimize, internal to the application, database requests. Ids are returned based on the provided status - * filter parameter

- * - * @param datasetId The dataset for which to retrieve the records. Cannot be null. - * @param sortField The sorting field. Cannot be null. - * @param sortDirection The sorting direction. Cannot be null. - * @param depublicationStatus The depublication status of the records. Can be null. - * @return A (possibly empty) list of depublish record ids. - * @throws BadContentException In case the records would violate the maximum number of depublished records that each dataset can - * have. - */ - public Set getAllDepublishRecordIdsWithStatus(String datasetId, - DepublishRecordIdSortField sortField, SortDirection sortDirection, - DepublicationStatus depublicationStatus) throws BadContentException { - return getAllDepublishRecordIdsWithStatus(datasetId, sortField, sortDirection, - depublicationStatus, Collections.emptySet()); - } - - - /** - * Get all depublished records for a given dataset. - *

This method is to be used with caution since it doesn't have a limit on the returned items. - * It is mainly used to minimize, internal to the application, database requests. Ids are returned based on the provided status - * filter parameter

- * - * @param datasetId The dataset for which to retrieve the records. Cannot be null. - * @param sortField The sorting field. Cannot be null. - * @param sortDirection The sorting direction. Cannot be null. - * @param depublicationStatus The depublication status of the records. Can be null. - * @param recordIds The record ids provided, that are to be checked upon. Can be null/empty - * @return A (possibly empty) list of depublish record ids. - * @throws BadContentException In case the records would violate the maximum number of depublished records that each dataset can - * have. - */ - public Set getAllDepublishRecordIdsWithStatus(String datasetId, - DepublishRecordIdSortField sortField, SortDirection sortDirection, - DepublicationStatus depublicationStatus, Set recordIds) throws BadContentException { - // Check list size: if this is too large we can throw exception regardless of what's in the database. - if (!CollectionUtils.isEmpty(recordIds) && (recordIds.size() - > maxDepublishRecordIdsPerDataset)) { - throw new BadContentException( - "RecordIds set is too big and violates the maximum number of records per dataset"); - } - - final Query query = prepareQueryForDepublishRecordIds(datasetId, - depublicationStatus, null); - final FindOptions findOptions = new FindOptions(); - findOptions.projection().include(DepublishRecordId.RECORD_ID_FIELD); - findOptions.projection().exclude(DepublishRecordId.ID_FIELD); - findOptions.sort(sortDirection.createSort(sortField.getDatabaseField())); - - if (!CollectionUtils.isEmpty(recordIds)) { - query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIds)); - } - - // Execute query with correct pagination - final List result = getListOfQueryRetryable(query, findOptions); - - // Convert result to right object. 
- return result.stream().map(DepublishRecordId::getRecordId).collect(Collectors.toSet()); - } - - private Query prepareQueryForDepublishRecordIds(String datasetId, - DepublicationStatus depublicationStatus, String searchQuery) { - // Create query. - final Query query = morphiaDatastoreProvider.getDatastore() - .find(DepublishRecordId.class); - query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)); - if (Objects.nonNull(depublicationStatus)) { - query.filter(Filters.eq(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, depublicationStatus)); - } - if (StringUtils.isNotBlank(searchQuery)) { - query.filter(Filters.regex(DepublishRecordId.RECORD_ID_FIELD, Pattern.quote(searchQuery))); - } - - return query; - } - - /** - * This method marks record ids with the provided {@link DepublicationStatus} and {@link Date} where appropriate. - *

A {@link DepublicationStatus#PENDING_DEPUBLICATION} unsets the depublication date

- *

A {@link DepublicationStatus#DEPUBLISHED} sets the depublication date with the one - * provided

- * - * @param datasetId the dataset for which to do this. Cannot be null - * @param recordIds the records for which to set this. Can be null or empty, in which case the operation will be performed on - * all records. If it is not empty, a new record will be created if a record with the given record ID is not already present. - * @param depublicationStatus the depublication status. Cannot be null - * @param depublicationDate the depublication date. Can be null only if depublicationStatus is - * {@link DepublicationStatus#PENDING_DEPUBLICATION} - */ - public void markRecordIdsWithDepublicationStatus(String datasetId, Set recordIds, - DepublicationStatus depublicationStatus, @Nullable Date depublicationDate) { - - // Check correctness of parameters - if (Objects.isNull(depublicationStatus) || StringUtils.isBlank(datasetId)) { - throw new IllegalArgumentException( - "DepublicationStatus cannot be null and datasetId cannot be empty"); - } else if (depublicationStatus == DepublicationStatus.DEPUBLISHED && Objects - .isNull(depublicationDate)) { - throw new IllegalArgumentException(String - .format("DepublicationDate cannot be null if depublicationStatus == %s ", - DepublicationStatus.DEPUBLISHED.name())); - } - - // If we have a specific record list, make sure that missing records are added. - final Set recordIdsToUpdate; // null if and only if we need to update all records - if (CollectionUtils.isEmpty(recordIds)) { - recordIdsToUpdate = null; - } else { - - // Add the records that are missing. - final Set recordIdsToAdd = getNonExistingRecordIds(datasetId, recordIds); - final Instant depublicationInstant = Optional.ofNullable(depublicationDate) - .filter( - date -> depublicationStatus != DepublicationStatus.PENDING_DEPUBLICATION) - .map(Date::toInstant).orElse(null); - addRecords(recordIdsToAdd, datasetId, depublicationStatus, depublicationInstant); - - // Compute the records to update - if there are none, we're done. 
- recordIdsToUpdate = new HashSet<>(recordIds); - recordIdsToUpdate.removeAll(recordIdsToAdd); - if (recordIdsToUpdate.isEmpty()) { - return; - } - } - - // Create query. - final Query query = morphiaDatastoreProvider.getDatastore() - .find(DepublishRecordId.class); - query.filter(Filters.eq(DepublishRecordId.DATASET_ID_FIELD, datasetId)); - if (recordIdsToUpdate != null) { - query.filter(Filters.in(DepublishRecordId.RECORD_ID_FIELD, recordIdsToUpdate)); - } - - // Define the update operations. - final ArrayList updateOperators = new ArrayList<>(); - updateOperators.add(UpdateOperators - .set(DepublishRecordId.DEPUBLICATION_STATUS_FIELD, - depublicationStatus)); - if (depublicationStatus == DepublicationStatus.PENDING_DEPUBLICATION) { - updateOperators.add(UpdateOperators.unset(DepublishRecordId.DEPUBLICATION_DATE_FIELD)); - } else { - updateOperators.add( - UpdateOperators.set(DepublishRecordId.DEPUBLICATION_DATE_FIELD, - depublicationDate == null? Date.from(Instant.now()): depublicationDate) - ); - } - - // Apply the operations. - retryableExternalRequestForNetworkExceptions( - () -> query.update(new UpdateOptions().multi(true), updateOperators.toArray(UpdateOperator[]::new))); - } - - /** - * Returns the page size imposed by this DAO. - * - * @return The page size. 
- */ - public int getPageSize() { - return pageSize; - } - - long deleteRecords(Query query) { - return retryableExternalRequestForNetworkExceptions( - () -> query.delete(new DeleteOptions().multi(true)).getDeletedCount()); - } - -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ExecutedMetisPluginId.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ExecutedMetisPluginId.java deleted file mode 100644 index 610f2b8dd7..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ExecutedMetisPluginId.java +++ /dev/null @@ -1,91 +0,0 @@ -package eu.europeana.metis.core.dao; - -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.MetisPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import java.util.Date; -import java.util.Objects; - -/** - * Instances of this class uniquely define a plugin that has been completed. It can be used to test - * equality of executed plugins. - */ -public class ExecutedMetisPluginId { - - private final Date pluginStartedDate; - private final PluginType pluginType; - - ExecutedMetisPluginId(Date pluginStartedDate, PluginType pluginType) { - this.pluginStartedDate = pluginStartedDate != null ? new Date(pluginStartedDate.getTime()) : null; - this.pluginType = pluginType; - if (this.pluginStartedDate == null || this.pluginType == null) { - throw new IllegalArgumentException(); - } - } - - /** - * Creates the ID of this plugin. - * - * @param plugin The pluign for which to create the ID. - * @return The ID of this plugin, or null if this plugin has not been started yet. 
- */ - public static ExecutedMetisPluginId forPlugin(MetisPlugin plugin) { - final Date startedDate = plugin.getStartedDate(); - if (startedDate == null) { - return null; - } - return new ExecutedMetisPluginId(startedDate, plugin.getPluginType()); - } - - /** - * Extracts the ID of the predecessor plugin of this plugin. - * - * @param plugin The plugin for which to extract the predecessor ID. - * @return The ID of the predecessor, or null if no predecessor defined. - */ - public static ExecutedMetisPluginId forPredecessor(MetisPlugin plugin) { - return forPredecessor(plugin.getPluginMetadata()); - } - - /** - * Extracts the ID of the predecessor plugin of this plugin. - * - * @param metadata The metadata of the plugin for which to extract the predecessor ID. - * @return The ID of the predecessor, or null if no predecessor defined. - */ - public static ExecutedMetisPluginId forPredecessor(MetisPluginMetadata metadata) { - final Date previousPluginTimestamp = metadata.getRevisionTimestampPreviousPlugin(); - final PluginType previousPluginType = PluginType.getPluginTypeFromEnumName( - metadata.getRevisionNamePreviousPlugin()); - if (previousPluginTimestamp == null || previousPluginType == null) { - return null; - } - return new ExecutedMetisPluginId(previousPluginTimestamp, previousPluginType); - } - - public Date getPluginStartedDate() { - return pluginStartedDate != null ? 
new Date(pluginStartedDate.getTime()) : null; - } - - public PluginType getPluginType() { - return pluginType; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final ExecutedMetisPluginId that = (ExecutedMetisPluginId) o; - return Objects.equals(pluginStartedDate, that.getPluginStartedDate()) && - getPluginType() == that.getPluginType(); - } - - @Override - public int hashCode() { - return Objects.hash(pluginStartedDate, getPluginType()); - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/MetisDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/MetisDao.java deleted file mode 100644 index 8c9a596567..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/MetisDao.java +++ /dev/null @@ -1,43 +0,0 @@ -package eu.europeana.metis.core.dao; - - -/** - * Interface specifying the minimum methods for a DAO persisting - * - * @param the type of class that should be used as an entry - * @param the type of class that should be used for return values - */ -public interface MetisDao { - - /** - * Create an entry in the database. - * - * @param t the class to be stored - * @return a value when the method finishes, can be different than the one stored. - */ - T create(T t); - - /** - * Update an entry in the database. - * - * @param t the class to be updated - * @return a value when the method finishes, can be different than the one stored. - */ - S update(T t); - - /** - * Get an entry from the database using the identifier used in the database for unique identification. - * - * @param id the identifier to find the entry - * @return the entry in the database - */ - T getById(S id); - - /** - * Delete an entry in the database. 
- * - * @param t the class to be delete - * @return boolean that indicates the status of the deletion - */ - boolean delete(T t); -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/PluginWithExecutionId.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/PluginWithExecutionId.java deleted file mode 100644 index 6837fd8ac4..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/PluginWithExecutionId.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.europeana.metis.core.dao; - -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; - -/** - * This object contains a pair consisting of a workflow execution ID and a plugin. - * - * @param The plugin type. - */ -public class PluginWithExecutionId { - - private final String executionId; - private final T plugin; - - /** - * Constructor. - * - * @param execution The execution. - * @param plugin The plugin. - */ - public PluginWithExecutionId(WorkflowExecution execution, T plugin) { - this(execution.getId().toString(), plugin); - } - - /** - * Constructor. - * - * @param executionId The execution ID. - * @param plugin The plugin. 
- */ - public PluginWithExecutionId(String executionId, T plugin) { - this.executionId = executionId; - this.plugin = plugin; - } - - public String getExecutionId() { - return executionId; - } - - public T getPlugin() { - return plugin; - } - - @Override - public boolean equals(Object otherObject) { - if (otherObject == null || otherObject.getClass() != this.getClass()) { - return false; - } - final PluginWithExecutionId other = (PluginWithExecutionId) otherObject; - return this.getPlugin().getId().equals(other.getPlugin().getId()); - } - - @Override - public int hashCode() { - return this.getPlugin().getId().hashCode(); - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java deleted file mode 100644 index 59f7bd8c58..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/ScheduledWorkflowDao.java +++ /dev/null @@ -1,242 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_ID; -import static eu.europeana.metis.core.common.DaoFieldNames.ID; -import static eu.europeana.metis.mongo.utils.MorphiaUtils.getListOfQueryRetryable; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; - -import com.mongodb.client.result.DeleteResult; -import dev.morphia.DeleteOptions; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.filters.Filter; -import dev.morphia.query.filters.Filters; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.network.ExternalRequestUtil; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import 
java.util.List; -import java.util.Optional; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Repository; - -/** - * DAO class for {@link ScheduledWorkflow} - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-25 - */ -@Repository -public class ScheduledWorkflowDao implements MetisDao { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScheduledWorkflowDao.class); - private int scheduledWorkflowPerRequest = RequestLimits.SCHEDULED_EXECUTIONS_PER_REQUEST - .getLimit(); - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - - /** - * Autowired Constructor with required {@link MorphiaDatastoreProvider} parameters. - * - * @param morphiaDatastoreProvider the class that handles the connection to the database - */ - @Autowired - public ScheduledWorkflowDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - } - - @Override - public ScheduledWorkflow create(ScheduledWorkflow scheduledWorkflow) { - final ObjectId objectId = Optional.ofNullable(scheduledWorkflow.getId()) - .orElseGet(ObjectId::new); - scheduledWorkflow.setId(objectId); - ScheduledWorkflow scheduledWorkflowSaved = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(scheduledWorkflow)); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("ScheduledWorkflow for datasetName: '{}' created in Mongo", - CRLF_PATTERN.matcher(scheduledWorkflow.getDatasetId()).replaceAll("")); - } - return scheduledWorkflowSaved; - } - - @Override - public String update(ScheduledWorkflow scheduledWorkflow) { - ScheduledWorkflow scheduledWorkflowSaved = - ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(scheduledWorkflow)); - if 
(LOGGER.isDebugEnabled()) { - LOGGER.debug("ScheduledWorkflow with datasetId: '{}' updated in Mongo", - CRLF_PATTERN.matcher(scheduledWorkflow.getDatasetId()).replaceAll("")); - } - return scheduledWorkflowSaved == null ? null : scheduledWorkflowSaved.getId().toString(); - } - - @Override - public ScheduledWorkflow getById(String id) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class) - .filter(Filters.eq(ID.getFieldName(), new ObjectId(id))); - return ExternalRequestUtil.retryableExternalRequestForNetworkExceptions(query::first); - } - - @Override - public boolean delete(ScheduledWorkflow scheduledWorkflow) { - return false; - } - - /** - * Get a shceduled workflow with {@code datasetId}. - * - * @param datasetId the dataset identifier - * @return the found ScheduledWorkflow or null - */ - public ScheduledWorkflow getScheduledWorkflow(String datasetId) { - return ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(ScheduledWorkflow.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)).first()); - } - - /** - * Get a ScheduledWorkflow using a datasetId. - * - * @param datasetId the dataset identifier - * @return the found ScheduledWorkflow or null - */ - public ScheduledWorkflow getScheduledWorkflowByDatasetId(String datasetId) { - return ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(ScheduledWorkflow.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)).first()); - } - - /** - * Check if a ScheduledWorkflow exists using {@link ScheduledWorkflow#getDatasetId()}. 
- * - * @param scheduledWorkflow the provided ScheduledWorkflow - * @return true if exist, otherwise false - */ - public boolean exists(ScheduledWorkflow scheduledWorkflow) { - return ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), scheduledWorkflow.getDatasetId())) - .first(new FindOptions().projection().include(ID.getFieldName()))) != null; - } - - /** - * Checks if a ScheduledWorkflow exists by datasetId. - * - * @param datasetId the dataset identifier - * @return the String representation of the ScheduledWorkflow identifier - */ - public String existsForDatasetId(String datasetId) { - ScheduledWorkflow storedScheduledWorkflow = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(ScheduledWorkflow.class) - .filter( - Filters.eq(DATASET_ID.getFieldName(), datasetId)) - .first(new FindOptions().projection().include(ID.getFieldName()))); - return storedScheduledWorkflow == null ? null : storedScheduledWorkflow.getId().toString(); - } - - /** - * Delete a ScheduledWorkflow using a datasetId. - * - * @param datasetId the dataset identifier - * @return true if one was deleted, false if none was deleted - */ - public boolean deleteScheduledWorkflow(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - DeleteResult delete = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions(query::delete); - LOGGER.debug( - "ScheduledWorkflow with datasetId: {} deleted from Mongo", - datasetId); - return (delete == null ? 0 : delete.getDeletedCount()) == 1; - } - - /** - * Deletes all ScheduledWorkflows using a datasetId. 
- * - * @param datasetId the dataset identifier - * @return true if at least one was deleted, false if none - */ - public boolean deleteAllByDatasetId(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - DeleteResult deleteResult = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions(() -> query.delete(new DeleteOptions().multi(true))); - LOGGER.debug( - "ScheduledWorkflows with datasetId: {} deleted from Mongo", datasetId); - return (deleteResult == null ? 0 : deleteResult.getDeletedCount()) >= 1; - } - - /** - * Get all ScheduledWorkflows using a {@link ScheduleFrequence} filter paged. - * - * @param scheduleFrequence the frequence used to filter the results - * @param nextPage the nextPage positive number - * @return a list of ScheduledWorkflows - */ - public List getAllScheduledWorkflows( - ScheduleFrequence scheduleFrequence, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class); - if (scheduleFrequence != null && scheduleFrequence != ScheduleFrequence.NULL) { - query.filter(Filters.eq("scheduleFrequence", scheduleFrequence)); - } - final FindOptions findOptions = new FindOptions() - .skip(nextPage * getScheduledWorkflowPerRequest()) - .limit(getScheduledWorkflowPerRequest()); - return getListOfQueryRetryable(query, findOptions); - } - - /** - * Get all ScheduledWorkflows using a date range check. 
- * - * @param lowerBound the lower edge of the date range with a check of greater or equal - * @param upperBound the upper edge of the date range with a check of lower than - * @param nextPage the nextPage positive number - * @return a list of ScheduledWorkflows - */ - public List getAllScheduledWorkflowsByDateRangeONCE( - LocalDateTime lowerBound, - LocalDateTime upperBound, int nextPage) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(ScheduledWorkflow.class); - final Filter scheduleFrequenceFilter = Filters.eq("scheduleFrequence", ScheduleFrequence.ONCE); - final Filter pointerDateLowerBoundFilter = Filters - .gte("pointerDate", Date.from(lowerBound.atZone(ZoneId.systemDefault()).toInstant())); - final Filter pointerDateUpperBoundFilter = Filters - .lt("pointerDate", Date.from(upperBound.atZone(ZoneId.systemDefault()).toInstant())); - query.filter(Filters - .and(scheduleFrequenceFilter, pointerDateLowerBoundFilter, pointerDateUpperBoundFilter)); - - final FindOptions findOptions = new FindOptions() - .skip(nextPage * getScheduledWorkflowPerRequest()) - .limit(getScheduledWorkflowPerRequest()); - return getListOfQueryRetryable(query, findOptions); - - } - - public int getScheduledWorkflowPerRequest() { - synchronized (this) { - return scheduledWorkflowPerRequest; - } - } - - public void setScheduledWorkflowPerRequest(int scheduledWorkflowPerRequest) { - synchronized (this) { - this.scheduledWorkflowPerRequest = scheduledWorkflowPerRequest; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java deleted file mode 100644 index 9da2e1e619..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowDao.java +++ /dev/null @@ -1,119 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_ID; -import static 
eu.europeana.metis.core.common.DaoFieldNames.ID; - -import com.mongodb.client.result.DeleteResult; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.filters.Filters; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.network.ExternalRequestUtil; -import java.util.Optional; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Repository; - -/** - * DAO object for workflows. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -@Repository -public class WorkflowDao implements MetisDao { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowDao.class); - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - - /** - * Constructs the DAO - * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} used to access Mongo - */ - public WorkflowDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - } - - @Override - public Workflow create(Workflow workflow) { - final ObjectId objectId = Optional.ofNullable(workflow.getId()).orElseGet(ObjectId::new); - workflow.setId(objectId); - final Workflow workflowSaved = ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(workflow)); - LOGGER.info("Workflow for datasetId '{}' created in Mongo", workflow.getDatasetId()); - return workflowSaved; - } - - - @Override - public String update(Workflow workflow) { - final Workflow workflowSaved = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(workflow)); - LOGGER.info("Workflow for datasetId '{}' updated in Mongo", workflow.getDatasetId()); - return workflowSaved == null ? 
null : workflowSaved.getId().toString(); - } - - @Override - public Workflow getById(String id) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(Workflow.class).filter(Filters.eq(ID.getFieldName(), new ObjectId(id))); - return ExternalRequestUtil.retryableExternalRequestForNetworkExceptions(query::first); - } - - @Override - public boolean delete(Workflow workflow) { - return deleteWorkflow(workflow.getDatasetId()); - } - - /** - * Delete a workflow using datasetId. - * - * @param datasetId the dataset identifier - * @return true if the workflow was found and deleted - */ - public boolean deleteWorkflow(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore().find(Workflow.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - DeleteResult deleteResult = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions(query::delete); - LOGGER.info("Workflow with datasetId {}, deleted from Mongo", datasetId); - return (deleteResult == null ? 0 : deleteResult.getDeletedCount()) == 1; - } - - /** - * Check existence of a workflow for the given Dataset ID. - * - * @param datasetId The dataset ID. - * @return whether a workflow exists for this dataset. - */ - public boolean workflowExistsForDataset(String datasetId) { - return null != getWorkflowId(datasetId); - } - - private String getWorkflowId(String datasetId) { - Workflow storedWorkflow = ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Workflow.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)) - .first(new FindOptions().projection().include(ID.getFieldName()))); - return storedWorkflow == null ? null : storedWorkflow.getId().toString(); - } - - /** - * Get a workflow using a datasetId. 
- * - * @param datasetId the dataset id - * @return {@link Workflow} - */ - public Workflow getWorkflow(String datasetId) { - return ExternalRequestUtil - .retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(Workflow.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)).first()); - } -} - diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java deleted file mode 100644 index d5f00d761c..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowExecutionDao.java +++ /dev/null @@ -1,868 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static eu.europeana.metis.core.common.DaoFieldNames.CREATED_DATE; -import static eu.europeana.metis.core.common.DaoFieldNames.DATASET_ID; -import static eu.europeana.metis.core.common.DaoFieldNames.FINISHED_DATE; -import static eu.europeana.metis.core.common.DaoFieldNames.ID; -import static eu.europeana.metis.core.common.DaoFieldNames.METIS_PLUGINS; -import static eu.europeana.metis.core.common.DaoFieldNames.PLUGIN_METADATA; -import static eu.europeana.metis.core.common.DaoFieldNames.PLUGIN_STATUS; -import static eu.europeana.metis.core.common.DaoFieldNames.PLUGIN_TYPE; -import static eu.europeana.metis.core.common.DaoFieldNames.STARTED_DATE; -import static eu.europeana.metis.core.common.DaoFieldNames.WORKFLOW_STATUS; -import static eu.europeana.metis.core.common.DaoFieldNames.XSLT_ID; -import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptions; - -import com.mongodb.client.result.DeleteResult; -import com.mongodb.client.result.UpdateResult; -import dev.morphia.DeleteOptions; -import dev.morphia.UpdateOptions; -import dev.morphia.aggregation.Aggregation; -import dev.morphia.aggregation.expressions.ArrayExpressions; -import 
dev.morphia.aggregation.expressions.ComparisonExpressions; -import dev.morphia.aggregation.expressions.ConditionalExpressions; -import dev.morphia.aggregation.expressions.Expressions; -import dev.morphia.aggregation.expressions.MathExpressions; -import dev.morphia.aggregation.expressions.impls.Expression; -import dev.morphia.aggregation.expressions.impls.MathExpression; -import dev.morphia.aggregation.stages.Lookup; -import dev.morphia.aggregation.stages.Projection; -import dev.morphia.aggregation.stages.Sort; -import dev.morphia.aggregation.stages.Unwind; -import dev.morphia.annotations.Entity; -import dev.morphia.query.FindOptions; -import dev.morphia.query.Query; -import dev.morphia.query.filters.Filter; -import dev.morphia.query.filters.Filters; -import dev.morphia.query.updates.UpdateOperator; -import dev.morphia.query.updates.UpdateOperators; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.rest.RequestLimits; -import eu.europeana.metis.core.workflow.SystemId; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.mongo.utils.MorphiaUtils; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import 
java.util.stream.Collectors; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Repository; -import org.springframework.util.CollectionUtils; - -/** - * Data Access Object for workflow executions using mongo. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-26 - */ -@Repository -public class WorkflowExecutionDao implements MetisDao { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutionDao.class); - - private static final int INQUEUE_POSITION_IN_OVERVIEW = 1; - private static final int RUNNING_POSITION_IN_OVERVIEW = 2; - private static final int DEFAULT_POSITION_IN_OVERVIEW = 3; - - private final MorphiaDatastoreProvider morphiaDatastoreProvider; - private int workflowExecutionsPerRequest = - RequestLimits.WORKFLOW_EXECUTIONS_PER_REQUEST.getLimit(); - private int maxServedExecutionListLength = Integer.MAX_VALUE; - - /** - * Constructs the DAO - * - * @param morphiaDatastoreProvider {@link MorphiaDatastoreProvider} used to access Mongo - */ - @Autowired - public WorkflowExecutionDao(MorphiaDatastoreProvider morphiaDatastoreProvider) { - this.morphiaDatastoreProvider = morphiaDatastoreProvider; - } - - @Override - public WorkflowExecution create(WorkflowExecution workflowExecution) { - final ObjectId objectId = Optional.ofNullable(workflowExecution.getId()) - .orElseGet(ObjectId::new); - workflowExecution.setId(objectId); - final WorkflowExecution workflowExecutionSaved = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().save(workflowExecution)); - LOGGER.debug("WorkflowExecution for datasetId '{}' created in Mongo", - workflowExecution.getDatasetId()); - return workflowExecutionSaved; - } - - @Override - public String update(WorkflowExecution workflowExecution) { - final WorkflowExecution workflowExecutionSaved = 
retryableExternalRequestForNetworkExceptions( - () -> - morphiaDatastoreProvider.getDatastore().save(workflowExecution)); - LOGGER.debug("WorkflowExecution for datasetId '{}' updated in Mongo", - workflowExecution.getDatasetId()); - return workflowExecutionSaved == null ? null : workflowExecutionSaved.getId().toString(); - } - - /** - * Overwrites only the portion of the WorkflowExecution that contains the plugins. - * - * @param workflowExecution the WorkflowExecution to update - */ - public void updateWorkflowPlugins(WorkflowExecution workflowExecution) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId())); - - final UpdateOperator updateOperator = UpdateOperators - .set(METIS_PLUGINS.getFieldName(), workflowExecution.getMetisPlugins()); - - UpdateResult updateResult = retryableExternalRequestForNetworkExceptions( - () -> query.update(new UpdateOptions(), updateOperator)); - LOGGER.debug( - "WorkflowExecution metisPlugins for datasetId '{}' updated in Mongo. (UpdateResults: {})", - workflowExecution.getDatasetId(), - updateResult == null ? 0 : updateResult.getModifiedCount()); - } - - /** - * Overwrites only the portion of the WorkflowExecution that contains the monitor information(plugins, started date, updated - * date). 
- * - * @param workflowExecution the WorkflowExecution to update - */ - public void updateMonitorInformation(WorkflowExecution workflowExecution) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId())); - final ArrayList updateOperators = new ArrayList<>(); - updateOperators.add(UpdateOperators - .set(WORKFLOW_STATUS.getFieldName(), - workflowExecution.getWorkflowStatus())); - if (workflowExecution.getStartedDate() != null) { - updateOperators - .add(UpdateOperators.set("startedDate", workflowExecution.getStartedDate())); - } - if (workflowExecution.getUpdatedDate() != null) { - updateOperators - .add(UpdateOperators.set("updatedDate", workflowExecution.getUpdatedDate())); - } - updateOperators.add( - UpdateOperators.set(METIS_PLUGINS.getFieldName(), workflowExecution.getMetisPlugins())); - UpdateResult updateResult = retryableExternalRequestForNetworkExceptions( - () -> query.update(new UpdateOptions(), updateOperators.toArray(UpdateOperator[]::new))); - LOGGER.debug( - "WorkflowExecution monitor information for datasetId '{}' updated in Mongo. (UpdateResults: {})", - workflowExecution.getDatasetId(), - updateResult == null ? 0 : updateResult.getModifiedCount()); - } - - /** - * Set the cancelling field in the database. - *

Also adds information of the user identifier that cancelled the execution or if it was by a - * system operation, using {@link SystemId} values as identifiers. For historical executions the value of the - * cancelledBy field will remain null

- * - * @param workflowExecution the workflowExecution to be cancelled - * @param metisUserView the user that triggered the cancellation or null if it was the system - */ - public void setCancellingState(WorkflowExecution workflowExecution, MetisUserView metisUserView) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), workflowExecution.getId())); - String cancelledBy; - if (metisUserView == null || metisUserView.getUserId() == null) { - cancelledBy = SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name(); - } else { - cancelledBy = metisUserView.getUserId(); - } - final UpdateOperator setCancellingOperator = UpdateOperators.set("cancelling", Boolean.TRUE); - final UpdateOperator setCancelledByOperator = UpdateOperators.set("cancelledBy", cancelledBy); - - UpdateResult updateResult = retryableExternalRequestForNetworkExceptions( - () -> query.update(new UpdateOptions(), setCancellingOperator, setCancelledByOperator)); - LOGGER.debug( - "WorkflowExecution cancelling for datasetId '{}' set to true in Mongo. (UpdateResults: {})", - workflowExecution.getDatasetId(), - updateResult == null ? 
0 : updateResult.getModifiedCount()); - } - - @Override - public WorkflowExecution getById(String id) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), new ObjectId(id))); - return retryableExternalRequestForNetworkExceptions(query::first); - } - - @Override - public boolean delete(WorkflowExecution workflowExecution) { - return false; - } - - /** - * Get the WorkflowExecution for a dataset identifier that is {@link WorkflowStatus#INQUEUE} or {@link WorkflowStatus#RUNNING} - * - * @param datasetId the dataset identifier - * @return the WorkflowExecution if found - */ - public WorkflowExecution getRunningOrInQueueExecution(String datasetId) { - Query query = runningOrInqueueQuery(datasetId); - return retryableExternalRequestForNetworkExceptions(query::first); - } - - /** - * Check the existence of a WorkflowExecution in the database. - * - * @param workflowExecution the WorkflowExecution to check upon - * @return true if it exist, false if it does not exist - */ - public boolean exists(WorkflowExecution workflowExecution) { - return retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class) - .filter(Filters.eq(DATASET_ID.getFieldName(), workflowExecution.getDatasetId())) - .first(new FindOptions().projection().include(ID.getFieldName()))) != null; - } - - /** - * Check if a WorkflowExecution exists for a dataset identifier and has not completed it's execution. 
- * - * @param datasetId the dataset identifier - * @return the identifier of the execution if found, otherwise null - */ - public String existsAndNotCompleted(String datasetId) { - Query query = runningOrInqueueQuery(datasetId); - - final FindOptions findOptions = new FindOptions(); - findOptions.projection().include(ID.getFieldName()); - findOptions.projection().include(WORKFLOW_STATUS.getFieldName()); - - WorkflowExecution storedWorkflowExecution = retryableExternalRequestForNetworkExceptions( - () -> query.first(findOptions)); - if (storedWorkflowExecution != null) { - return storedWorkflowExecution.getId().toString(); - } - return null; - } - - private Query runningOrInqueueQuery(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class); - - final Filter datasetIdFilter = Filters.eq(DATASET_ID.getFieldName(), datasetId); - final Filter workflowStatusFilter = Filters - .or(Filters.eq(WORKFLOW_STATUS.getFieldName(), WorkflowStatus.INQUEUE), - Filters.eq(WORKFLOW_STATUS.getFieldName(), WorkflowStatus.RUNNING)); - query.filter(datasetIdFilter, workflowStatusFilter); - - return query; - } - - /** - * Get the first successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types - * - * @param datasetId the dataset identifier - * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values. - * @return the first plugin found - */ - public PluginWithExecutionId getFirstSuccessfulPlugin(String datasetId, - Set pluginTypes) { - return Optional.ofNullable(getFirstOrLastFinishedPlugin(datasetId, pluginTypes, true)) - .orElse(null); - } - - /** - * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types - * - * @param datasetId the dataset identifier - * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values. 
- * @return the last plugin found - */ - public PluginWithExecutionId getLatestSuccessfulPlugin(String datasetId, - Set pluginTypes) { - return Optional.ofNullable(getFirstOrLastFinishedPlugin(datasetId, pluginTypes, false)) - .orElse(null); - } - - /** - * Get the last successful Plugin of a WorkflowExecution for a dataset identifier and a set of plugin types - * - * @param datasetId the dataset identifier - * @param pluginTypes the set of plugin types to check for. Cannot be null or contain null values. - * @param limitToValidData Only return the result if it has valid data (see {@link DataStatus}). - * @return the last plugin found - */ - public PluginWithExecutionId getLatestSuccessfulExecutablePlugin( - String datasetId, - Set pluginTypes, boolean limitToValidData) { - - // Verify the plugin types - verifyEnumSetIsValidAndNotEmpty(pluginTypes); - - // Perform the database query. If nothing found, we are done. - final Set convertedPluginTypes = pluginTypes.stream() - .map(ExecutablePluginType::toPluginType).collect(Collectors.toSet()); - final PluginWithExecutionId uncastResultWrapper = - getFirstOrLastFinishedPlugin(datasetId, convertedPluginTypes, false); - final MetisPlugin uncastResult = Optional.ofNullable(uncastResultWrapper) - .map(PluginWithExecutionId::getPlugin).orElse(null); - if (uncastResultWrapper == null || uncastResult == null) { - return null; - } - - // Check for the result type: it should be executable. - if (!(uncastResult instanceof ExecutablePlugin)) { - LOGGER.warn("Found plugin {} for executable plugin type {} that is not itself executable.", - uncastResult.getId(), uncastResult.getPluginType()); - return null; - } - final ExecutablePlugin castResult = (ExecutablePlugin) uncastResult; - - // if necessary, check for the data validity. 
- final PluginWithExecutionId result; - if (limitToValidData && MetisPlugin.getDataStatus(castResult) != DataStatus.VALID) { - result = null; - } else { - result = new PluginWithExecutionId<>(uncastResultWrapper.getExecutionId(), castResult); - } - return result; - } - - PluginWithExecutionId getFirstOrLastFinishedPlugin(String datasetId, - Set pluginTypes, boolean firstFinished) { - - // Verify the plugin types - verifyEnumSetIsValidAndNotEmpty(pluginTypes); - - // Create the filter to match a plugin satisfying the conditions. - final Filter datasetIdFilter = Filters.eq(DATASET_ID.getFieldName(), datasetId); - final Filter pluginStatusFilter = Filters - .eq(METIS_PLUGINS.getFieldName() + "." + PLUGIN_STATUS.getFieldName(), - PluginStatus.FINISHED); - - List pluginTypesFilters = new ArrayList<>(pluginTypes.size()); - final String pluginTypeField = METIS_PLUGINS.getFieldName() + "." + PLUGIN_TYPE.getFieldName(); - for (PluginType pluginType : pluginTypes) { - pluginTypesFilters.add(Filters.eq(pluginTypeField, pluginType)); - } - final Filter collectedFilters; - if (pluginTypesFilters.isEmpty()) { - collectedFilters = Filters.and(datasetIdFilter, pluginStatusFilter); - } else { - final Filter pluginTypeOrFilter = Filters.or(pluginTypesFilters.toArray(Filter[]::new)); - collectedFilters = Filters.and(datasetIdFilter, pluginStatusFilter, pluginTypeOrFilter); - } - - // Query: unwind and match again so that we know that all conditions apply to the same plugin. - final Aggregation aggregation = morphiaDatastoreProvider.getDatastore() - .aggregate(WorkflowExecution.class); - - final String orderField = - METIS_PLUGINS.getFieldName() + "." + FINISHED_DATE.getFieldName(); - aggregation.match(collectedFilters) - .unwind(Unwind.unwind(METIS_PLUGINS.getFieldName())) - .match(collectedFilters) - .sort(firstFinished ? 
Sort.sort().ascending(orderField) : Sort.sort().descending(orderField)) - .limit(1); - - final List metisPluginsIterator = MorphiaUtils - .getListOfAggregationRetryable(aggregation, - WorkflowExecution.class); - - // Because of the unwind, we know that the plugin we need is always the first one. - return Optional.ofNullable(metisPluginsIterator).stream().flatMap(Collection::stream) - .filter(execution -> !execution.getMetisPlugins().isEmpty()) - .map(execution -> new PluginWithExecutionId(execution, - execution.getMetisPlugins().get(0))).findFirst().orElse(null); - } - - private void verifyEnumSetIsValidAndNotEmpty(Set> set) { - if (set == null || set.isEmpty() || set.stream().anyMatch(Objects::isNull)) { - throw new IllegalArgumentException(); - } - } - - /** - * Get all WorkflowExecutions paged. - * - * @param datasetIds a set of dataset identifiers to filter, can be empty or null to get all - * @param workflowStatuses a set of workflow statuses to filter, can be empty or null - * @param orderField the field to be used to sort the results - * @param ascending a boolean value to request the ordering to ascending or descending - * @param nextPage The first page to be served (zero-based) - * @param pageCount How many pages are requested - can be null - * @param ignoreMaxServedExecutionsLimit whether this method is to apply the limit on the number of executions are served. Be - * careful when setting this to true. 
- * @return a list of all the WorkflowExecutions found - */ - public ResultList getAllWorkflowExecutions(Set datasetIds, - Set workflowStatuses, DaoFieldNames orderField, boolean ascending, - int nextPage, Integer pageCount, boolean ignoreMaxServedExecutionsLimit) { - - // Prepare pagination and check that there is something to query - final Pagination pagination = createPagination(nextPage, pageCount, ignoreMaxServedExecutionsLimit); - if (pagination.getLimit() < 1) { - return createResultList(Collections.emptyList(), pagination); - } - - // Create query - final Query query = - morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class); - - // Set dataset ID and workflow status limitations. - if (datasetIds != null && !datasetIds.isEmpty()) { - query.filter(Filters.in(DATASET_ID.getFieldName(), datasetIds)); - } - if (!CollectionUtils.isEmpty(workflowStatuses)) { - query.filter(Filters.in(WORKFLOW_STATUS.getFieldName(), workflowStatuses)); - } - - // Execute query with correct pagination - final FindOptions findOptions = new FindOptions().skip(pagination.getSkip()) - .limit(pagination.getLimit()); - - // Set ordering - if (orderField != null) { - if (ascending) { - findOptions.sort(dev.morphia.query.Sort.ascending(orderField.getFieldName())); - } else { - findOptions.sort(dev.morphia.query.Sort.descending(orderField.getFieldName())); - } - } - - final List result = MorphiaUtils.getListOfQueryRetryable(query, findOptions); - return createResultList(result, pagination); - } - - /** - * Get an overview of all WorkflowExecutions. This returns a list of executions ordered to display an overview. First the ones - * in queue, then those in progress and then those that are finalized. Within these categories they will be sorted by creation - * date (most recent first). This method does support pagination. - *

- * TODO when we migrate - * to mongo 3.4 or later, we can do this easier with new aggregation pipeline stages and - * operators. The main improvements are 1) to try to map the root to the 'execution' variable so - * that we don't have to look it up afterwards, and 2) to use $addFields with $switch to add the - * statusIndex instead of having to go through creating and subtracting the two temporary fields. - * - * @param datasetIds a set of dataset identifiers to filter, can be empty or null to get all - * @param pluginStatuses the plugin statuses to filter. Can be null. - * @param pluginTypes the plugin types to filter. Can be null. - * @param fromDate the date from where the results should start. Can be null. - * @param toDate the date to where the results should end. Can be null. - * @param nextPage the nextPage token - * @param pageCount the number of pages that are requested - * @return a list of all the WorkflowExecutions found. Is not null. - */ - public ResultList getWorkflowExecutionsOverview(Set datasetIds, - Set pluginStatuses, Set pluginTypes, Date fromDate, Date toDate, - int nextPage, int pageCount) { - return getWorkflowExecutionsOverview(datasetIds, pluginStatuses, pluginTypes, fromDate, toDate, - createPagination(nextPage, pageCount, false)); - } - - ResultList getWorkflowExecutionsOverview(Set datasetIds, - Set pluginStatuses, Set pluginTypes, Date fromDate, Date toDate, - Pagination pagination) { - - return retryableExternalRequestForNetworkExceptions(() -> { - - // Prepare pagination and check that there is something to query - if (pagination.getLimit() < 1) { - return createResultList(Collections.emptyList(), pagination); - } - - // Create the aggregate pipeline - final Aggregation aggregation = morphiaDatastoreProvider.getDatastore() - .aggregate(WorkflowExecution.class); - - // Step 1: create filter to match - final Filter filter = createFilter(datasetIds, pluginStatuses, pluginTypes, fromDate, toDate); - aggregation.match(filter); - - // 
Step 2: determine status index field - final String statusIndexField = determineOrderingStatusIndex(aggregation); - - // Step 3: Sort - first on the status index, then on the createdDate. - aggregation - .sort(Sort.sort().ascending(statusIndexField).descending(CREATED_DATE.getFieldName())); - - // Step 4: Apply pagination - aggregation.skip(pagination.getSkip()).limit(pagination.getLimit()); - - // Step 5: Create join of dataset and execution to combine the data information - joinDatasetAndWorkflowExecution(aggregation); - - // Done: execute and return result. - final List result = MorphiaUtils - .getListOfAggregationRetryable(aggregation, - ExecutionDatasetPair.class); - return createResultList(result, pagination); - }); - } - - private Filter createFilter(Set datasetIds, Set pluginStatuses, - Set pluginTypes, Date fromDate, Date toDate) { - List elemMatchFilters = new ArrayList<>(); - if (!CollectionUtils.isEmpty(pluginTypes)) { - elemMatchFilters.add(Filters.in(PLUGIN_TYPE.getFieldName(), pluginTypes)); - } - if (!CollectionUtils.isEmpty(pluginStatuses)) { - elemMatchFilters.add(Filters.in(PLUGIN_STATUS.getFieldName(), pluginStatuses)); - } - if (fromDate != null) { - elemMatchFilters.add(Filters.gte(STARTED_DATE.getFieldName(), fromDate)); - } - if (toDate != null) { - elemMatchFilters.add(Filters.lt(STARTED_DATE.getFieldName(), toDate)); - } - final Filter elemMatchFilter = Filters - .elemMatch(METIS_PLUGINS.getFieldName(), elemMatchFilters.toArray(Filter[]::new)); - - final Filter collectedFilters; - if (CollectionUtils.isEmpty(datasetIds)) { - collectedFilters = elemMatchFilter; - } else { - final Filter datasetIdFilter = Filters.in(DATASET_ID.getFieldName(), datasetIds); - collectedFilters = Filters.and(elemMatchFilter, datasetIdFilter); - } - return collectedFilters; - } - - private String determineOrderingStatusIndex(Aggregation aggregation) { - // Step 1: Add specific positions when the status is INQUEUE or RUNNING. 
- final String statusInQueueField = "statusInQueue"; - final String statusRunningField = "statusRunning"; - final Expression inqueueCheckExpression = ComparisonExpressions - .eq(Expressions.field(WORKFLOW_STATUS.getFieldName()), - Expressions.value(WorkflowStatus.INQUEUE.name())); - final Expression inqueueConditionExpression = ConditionalExpressions - .condition(inqueueCheckExpression, Expressions.value(INQUEUE_POSITION_IN_OVERVIEW), - Expressions.value(0)); - final Expression runningCheckExpression = ComparisonExpressions - .eq(Expressions.field(WORKFLOW_STATUS.getFieldName()), - Expressions.value(WorkflowStatus.RUNNING.name())); - final Expression runningConditionExpression = ConditionalExpressions - .condition(runningCheckExpression, Expressions.value(RUNNING_POSITION_IN_OVERVIEW), - Expressions.value(0)); - - aggregation.project(Projection.project() - .include(statusInQueueField, inqueueConditionExpression) - .include(statusRunningField, runningConditionExpression) - .include(CREATED_DATE.getFieldName()) - .include(DATASET_ID.getFieldName())); - - // Step 2: Copy specific positions to final variable: use default position if no position is set. 
- final String statusIndexField = "statusIndex"; - - final MathExpression sumExpression = MathExpressions - .add(Expressions.field(statusInQueueField), Expressions.field(statusRunningField)); - final Expression sumCheckExpression = ComparisonExpressions - .eq(sumExpression, Expressions.value(0)); - final Expression statusIndexExpression = ConditionalExpressions - .condition(sumCheckExpression, Expressions.value(DEFAULT_POSITION_IN_OVERVIEW), - sumExpression); - - aggregation.project(Projection.project() - .include(statusIndexField, statusIndexExpression) - .include(CREATED_DATE.getFieldName()) - .include(DATASET_ID.getFieldName())); - - return statusIndexField; - } - - private void joinDatasetAndWorkflowExecution(Aggregation aggregation) { - // Step 1: Join with the dataset and the execution - final String datasetListField = "datasetList"; - final String executionListField = "executionList"; - aggregation.lookup(Lookup.lookup(Dataset.class).localField(DATASET_ID.getFieldName()) - .foreignField(DATASET_ID.getFieldName()).as(datasetListField)); - aggregation.lookup(Lookup.lookup(WorkflowExecution.class).localField(ID.getFieldName()) - .foreignField(ID.getFieldName()).as(executionListField)); - - // Step 2: Keep only the first entry in the dataset and execution lists. - final String datasetField = "dataset"; - final String executionField = "execution"; - final Projection projection = Projection.project() - .include(datasetField, - ArrayExpressions.elementAt(Expressions.field(datasetListField), - Expressions.value(0))) - .include(executionField, ArrayExpressions - .elementAt(Expressions.field(executionListField), Expressions.value(0))) - .suppressId(); - aggregation.project(projection); - } - - /** - * This object contains a pair consisting of a dataset and an execution. It is meant to be a result of aggregate queries, so the - * field names cannot easily be changed. - *

Annotation {@link Entity} required so that morphia can handle the aggregations.

- */ - @Entity - public static class ExecutionDatasetPair { - - private Dataset dataset; - private WorkflowExecution execution; - - public ExecutionDatasetPair() { - } - - /** - * Constructor. - * - * @param dataset The dataset. - * @param execution The execution. - */ - public ExecutionDatasetPair(Dataset dataset, WorkflowExecution execution) { - this.dataset = dataset; - this.execution = execution; - } - - public Dataset getDataset() { - return dataset; - } - - public WorkflowExecution getExecution() { - return execution; - } - } - - /** - * The number of WorkflowExecutions that would be returned if a get all request would be performed. - * - * @return the number representing the size during a get all request - */ - public int getWorkflowExecutionsPerRequest() { - synchronized (this) { - return workflowExecutionsPerRequest; - } - } - - /** - * Set the number of WorkflowExecutions that would be returned if a get all request would be performed. - * - * @param workflowExecutionsPerRequest the number to set to - */ - public void setWorkflowExecutionsPerRequest(int workflowExecutionsPerRequest) { - synchronized (this) { - this.workflowExecutionsPerRequest = workflowExecutionsPerRequest; - } - } - - /** - * Get the maximum number of workflow executions that are served (regardless of pagination). - * - * @return The limit. - */ - public int getMaxServedExecutionListLength() { - synchronized (this) { - return maxServedExecutionListLength; - } - } - - /** - * Set the maximum number of workflowExecutions that are served (regardless of pagination). - * - * @param maxServedExecutionListLength The limit. 
- */ - public void setMaxServedExecutionListLength(int maxServedExecutionListLength) { - synchronized (this) { - this.maxServedExecutionListLength = maxServedExecutionListLength; - } - } - - /** - * Check if a WorkflowExecution using an execution identifier is {@link WorkflowStatus#CANCELLED} - * - * @param id the execution identifier - * @return true for cancelled, false for not cancelled - */ - public boolean isCancelled(ObjectId id) { - WorkflowExecution workflowExecution = retryableExternalRequestForNetworkExceptions(() -> - morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), id)) - .first(new FindOptions().projection().include(WORKFLOW_STATUS.getFieldName()))); - return workflowExecution != null - && workflowExecution.getWorkflowStatus() == WorkflowStatus.CANCELLED; - } - - /** - * Check if a WorkflowExecution using an execution identifier is in a cancelling state. The state before finally being - * {@link WorkflowStatus#CANCELLED} - * - * @param id the execution identifier - * @return true for cancelling, false for not cancelling - */ - public boolean isCancelling(ObjectId id) { - WorkflowExecution workflowExecution = retryableExternalRequestForNetworkExceptions( - () -> morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class) - .filter(Filters.eq(ID.getFieldName(), id)) - .first(new FindOptions().projection().include("cancelling"))); - return workflowExecution != null && workflowExecution.isCancelling(); - } - - /** - * Delete all WorkflowExecutions for a dataset identifier - * - * @param datasetId the dataset identifier - * @return true if at least one was removed - */ - public boolean deleteAllByDatasetId(String datasetId) { - Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - DeleteResult deleteResult = retryableExternalRequestForNetworkExceptions( - () -> query.delete(new 
DeleteOptions().multi(true))); - LOGGER.debug("WorkflowExecution with datasetId: {}, deleted from Mongo", datasetId); - return (deleteResult == null ? 0 : deleteResult.getDeletedCount()) >= 1; - } - - /** - * This method retrieves the workflow execution of which the task with the given ID is a subtask. - * - * @param externalTaskId The external task ID that is to be queried. - * @return The workflow execution. - */ - public WorkflowExecution getByExternalTaskId(long externalTaskId) { - // TODO JV Validation is disabled because otherwise it complains that the subquery is looking in a - // list of AbstractMetisPlugin objects that don't have the "externalTaskId" property being queried. - final Query query = morphiaDatastoreProvider.getDatastore() - .find(WorkflowExecution.class).disableValidation(); - query.filter(Filters.elemMatch(METIS_PLUGINS.getFieldName(), - Filters.eq("externalTaskId", Long.toString(externalTaskId)))); - return retryableExternalRequestForNetworkExceptions(query::first); - } - - /** - * This method retrieves the workflow execution that contains a subtask satisfying the given parameters. - * - * @param plugin The plugin ID representing the subtask. - * @param datasetId The dataset ID of the workflow execution. - * @return The workflow execution. - */ - public WorkflowExecution getByTaskExecution(ExecutedMetisPluginId plugin, String datasetId) { - - // Create subquery to find the correct plugin. 
- List elemMatchFilters = new ArrayList<>(); - elemMatchFilters.add(Filters.eq(STARTED_DATE.getFieldName(), plugin.getPluginStartedDate())); - elemMatchFilters.add(Filters.eq(PLUGIN_TYPE.getFieldName(), plugin.getPluginType())); - - // Create query to find workflow execution - final Query query = - morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class); - query.filter(Filters.eq(DATASET_ID.getFieldName(), datasetId)); - query.filter(Filters.elemMatch(METIS_PLUGINS.getFieldName(), - elemMatchFilters.toArray(Filter[]::new))); - return retryableExternalRequestForNetworkExceptions(query::first); - } - - /** - * This method retrieves any {@link WorkflowExecution} that matches the provided {@code xsltId}. - * - * @param xsltId the xslt id - * @return the matched workflow execution - */ - public WorkflowExecution getAnyByXsltId(String xsltId) { - // Create query to find workflow execution - final Query query = - morphiaDatastoreProvider.getDatastore().find(WorkflowExecution.class) - .disableValidation(); - query.disableValidation().filter(Filters.elemMatch(METIS_PLUGINS.getFieldName(), - Filters.eq(PLUGIN_METADATA.getFieldName() + "." + XSLT_ID.getFieldName(), xsltId))); - return retryableExternalRequestForNetworkExceptions(query::first); - } - - Pagination createPagination(int firstPage, Integer pageCount, - boolean ignoreMaxServedExecutionsLimit) { - - // Compute the total number (including skipped pages) - final int pageSize = getWorkflowExecutionsPerRequest(); - final int maxResultCount = - ignoreMaxServedExecutionsLimit ? Integer.MAX_VALUE : getMaxServedExecutionListLength(); - int total = maxResultCount; //Default value if no pageCount supplied - if (pageCount != null) { - total = Math.min((firstPage + pageCount) * pageSize, maxResultCount); - } - - // Compute the skipped result count and the returned result count (limit). 
- final int skip = firstPage * pageSize; - final boolean maxRequested = total == maxResultCount; - final int limit = Math.max(total - skip, 0); - return new Pagination(skip, limit, maxRequested); - } - - static class Pagination { - - private final int skip; - private final int limit; - private final boolean maxRequested; - - private Pagination(int skip, int limit, boolean maxRequested) { - this.skip = skip; - this.limit = limit; - this.maxRequested = maxRequested; - } - - int getSkip() { - return skip; - } - - int getLimit() { - return limit; - } - - boolean isMaxReached(int resultSize) { - return maxRequested && resultSize == limit; - } - } - - private static ResultList createResultList(List result, Pagination pagination) { - return new ResultList<>(result, pagination.isMaxReached(result.size())); - } - - /** - * This object contains a result list with some pagination information. - * - * @param The type of the result objects. - */ - public static class ResultList { - - private final List results; - private final boolean maxResultCountReached; - - /** - * Constructor. - * - * @param results The results. - * @param maxResultCountReached Whether the maximum result count has been reached (indicating whether next pages will be - * served). 
- */ - public ResultList(List results, boolean maxResultCountReached) { - this.results = new ArrayList<>(results); - this.maxResultCountReached = maxResultCountReached; - } - - public List getResults() { - return Collections.unmodifiableList(results); - } - - public boolean isMaxResultCountReached() { - return maxResultCountReached; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowValidationUtils.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowValidationUtils.java deleted file mode 100644 index 2d44d5c88f..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/dao/WorkflowValidationUtils.java +++ /dev/null @@ -1,222 +0,0 @@ -package eu.europeana.metis.core.dao; - -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.utils.CommonStringValues; -import java.net.MalformedURLException; -import 
java.net.URI; -import java.net.URISyntaxException; -import java.util.EnumSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import org.apache.hc.core5.net.URIBuilder; -import org.springframework.util.CollectionUtils; - -/** - * This class is a utility class that can answer questions related to the validation of workflows. - */ -public class WorkflowValidationUtils { - - private final DepublishRecordIdDao depublishRecordIdDao; - private final DataEvolutionUtils dataEvolutionUtils; - - /** - * Constructor. - * - * @param depublishRecordIdDao the depublication record id dao - * @param dataEvolutionUtils The utilities class for sorting out data evolution - */ - public WorkflowValidationUtils(DepublishRecordIdDao depublishRecordIdDao, - DataEvolutionUtils dataEvolutionUtils) { - this.depublishRecordIdDao = depublishRecordIdDao; - this.dataEvolutionUtils = dataEvolutionUtils; - } - - /** - * This method validates the workflow plugin sequence. In particular, it checks: - *
    - *
  1. That the workflow is not empty and contains plugins with valid types,
  2. - *
  3. That the first plugin is not link checking (except when it is the only plugin),
  4. - *
  5. That no two plugins of the same type occur in the workflow,
  6. - *
  7. That if depublish is enabled no other plugins are allowed in the workflow,
  8. - *
  9. That the first plugin has a valid predecessor plugin in the dataset's history (as defined by - * {@link DataEvolutionUtils#getPredecessorTypes(ExecutablePluginType)}), the type of which can be - * overridden by the enforced predecessor type, and the root plugin (i.e. harvest) of which is - * equal to the latest successful harvest (i.e. no old data should be processed after new data has - * been introduced),
  10. - *
  11. That all subsequent plugins have a valid predecessor within the workflow (as defined by - * {@link DataEvolutionUtils#getPredecessorTypes(ExecutablePluginType)}),
  12. - *
  13. That harvesting plugins have valid URL settings.
  14. - *
- * - * @param workflow The workflow to validate. - * @param enforcedPredecessorType If not null, overrides the predecessor type of the first plugin. - * @return The predecessor of the first plugin. Or null if no predecessor is required. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} In case the workflow is empty, or contains plugins with - * invalid types.
  • - *
  • {@link PluginExecutionNotAllowed} in case the plugin sequence as provided is not - * allowed.
  • - *
- */ - public PluginWithExecutionId validateWorkflowPlugins(Workflow workflow, - ExecutablePluginType enforcedPredecessorType) throws GenericMetisException { - - // Workflow should have a plugin list. - List metisPluginsMetadata = workflow.getMetisPluginsMetadata(); - - if (metisPluginsMetadata == null) { - throw new BadContentException("Workflow should not be empty."); - } - - // Compile the list of enabled plugins. - final List enabledPlugins = metisPluginsMetadata - .stream().filter(ExecutablePluginMetadata::isEnabled).toList(); - - // Workflow should not be empty and all should have a type. - if (enabledPlugins.isEmpty()) { - throw new BadContentException("Workflow should not be empty."); - } - if (enabledPlugins.stream().map(AbstractExecutablePluginMetadata::getExecutablePluginType) - .anyMatch(Objects::isNull)) { - throw new BadContentException( - "There are enabled plugins of which the type could not be determined."); - } - - // Validate dataset/record depublication - validateDepublishPlugin(workflow.getDatasetId(), enabledPlugins); - - // Validate and normalize the harvest parameters of harvest plugins (even if not enabled) - validateAndTrimHarvestParameters(workflow.getDatasetId(), enabledPlugins); - - // Check that first plugin is not link checking (except if it is the only plugin) - if (enabledPlugins.size() > 1 - && enabledPlugins.getFirst().getPluginType() == PluginType.LINK_CHECKING) { - throw new PluginExecutionNotAllowed(CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED); - } - - // Make sure that all enabled plugins (except the first) have a predecessor within the workflow. 
- final EnumSet previousTypesInWorkflow = EnumSet - .of(enabledPlugins.getFirst().getExecutablePluginType()); - for (int i = 1; i < enabledPlugins.size(); i++) { - - // Find the permissible predecessors - final ExecutablePluginType pluginType = enabledPlugins.get(i).getExecutablePluginType(); - final Set permissiblePredecessors = DataEvolutionUtils - .getPredecessorTypes(pluginType); - - // Check if we have the right predecessor plugin types in the workflow - final boolean hasNoPredecessor = !permissiblePredecessors.isEmpty() && - permissiblePredecessors.stream().noneMatch(previousTypesInWorkflow::contains); - if (hasNoPredecessor) { - throw new PluginExecutionNotAllowed(CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED); - } - - // Add the plugin type to those we have seen - previousTypesInWorkflow.add(pluginType); - } - - // We should now have seen all types. Make sure that there are no duplicates - if (previousTypesInWorkflow.size() != enabledPlugins.size()) { - throw new PluginExecutionNotAllowed(CommonStringValues.PLUGIN_EXECUTION_NOT_ALLOWED); - } - - // Check the presence of the predecessor and return it. - return dataEvolutionUtils - .computePredecessorPlugin(enabledPlugins.getFirst().getExecutablePluginType(), - enforcedPredecessorType, workflow.getDatasetId()); - } - - private void validateAndTrimHarvestParameters(String datasetId, - Iterable enabledPlugins) throws BadContentException { - for (AbstractExecutablePluginMetadata pluginMetadata : enabledPlugins) { - if (pluginMetadata instanceof OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata) { - final URI validatedUri = validateUrl(oaipmhHarvestPluginMetadata.getUrl()); - oaipmhHarvestPluginMetadata - .setUrl(new URIBuilder(validatedUri).removeQuery().setFragment(null).toString()); - oaipmhHarvestPluginMetadata.setMetadataFormat(oaipmhHarvestPluginMetadata.getMetadataFormat() == null ? 
null - : oaipmhHarvestPluginMetadata.getMetadataFormat().trim()); - oaipmhHarvestPluginMetadata.setSetSpec( - oaipmhHarvestPluginMetadata.getSetSpec() == null ? null : oaipmhHarvestPluginMetadata.getSetSpec().trim()); - } - if (pluginMetadata instanceof HTTPHarvestPluginMetadata httpHarvestPluginMetadata) { - httpHarvestPluginMetadata.setUrl(validateUrl(httpHarvestPluginMetadata.getUrl()).toString()); - } - if (pluginMetadata instanceof AbstractHarvestPluginMetadata abstractHarvestPluginMetadata) { - if (abstractHarvestPluginMetadata.isIncrementalHarvest() && !isIncrementalHarvestingAllowed(datasetId)) { - throw new BadContentException("Can't perform incremental harvesting for this dataset."); - } - } - } - } - - /** - * This method returns whether currently it is permitted/possible to perform incremental harvesting for the given dataset. - * - * @param datasetId The ID of the dataset for which to check. - * @return Whether we can perform incremental harvesting for the dataset. - */ - public boolean isIncrementalHarvestingAllowed(String datasetId) { - // We need to do the entire analysis to make sure that all publish actions are consistent. 
- return !CollectionUtils.isEmpty(dataEvolutionUtils.getPublishedHarvestIncrements(datasetId)); - } - - private void validateDepublishPlugin(String datasetId, - List enabledPlugins) throws BadContentException { - // If depublish requested, make sure it's the only plugin in the workflow - final Optional depublishPluginMetadata = enabledPlugins.stream() - .filter(plugin -> - plugin.getExecutablePluginType() - .toPluginType() - == PluginType.DEPUBLISH) - .map(DepublishPluginMetadata.class::cast) - .findFirst(); - if (enabledPlugins.size() > 1 && depublishPluginMetadata.isPresent()) { - throw new BadContentException( - "If DEPUBLISH plugin enabled, no other enabled plugins are allowed."); - } - - // If record depublication requested, check if there are pending record ids in the db - if (depublishPluginMetadata.isPresent() && !depublishPluginMetadata.get() - .isDatasetDepublish()) { - final Set pendingDepublicationIds = depublishRecordIdDao - .getAllDepublishRecordIdsWithStatus(datasetId, - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.PENDING_DEPUBLICATION); - if (CollectionUtils.isEmpty(pendingDepublicationIds)) { - throw new BadContentException( - "Record depublication requested but there are no pending depublication record ids in the db"); - } - } - } - - private static URI validateUrl(String urlString) throws BadContentException { - if (urlString == null) { - throw new BadContentException("Harvesting parameters are missing"); - } - try { - final URI uri = new URI(urlString.trim()); - uri.toURL(); - return uri; - } catch (MalformedURLException | URISyntaxException | IllegalArgumentException e) { - throw new BadContentException("Harvesting parameters are invalid", e); - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/PersistenceProvider.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/PersistenceProvider.java deleted file mode 
100644 index 882e4c080f..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/PersistenceProvider.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.europeana.metis.core.execution; - -import com.rabbitmq.client.Channel; -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import org.redisson.api.RedissonClient; - -class PersistenceProvider { - - private final Channel rabbitmqPublisherChannel; - private final Channel rabbitmqConsumerChannel; - private final SemaphoresPerPluginManager semaphoresPerPluginManager; - private final WorkflowExecutionDao workflowExecutionDao; - private final WorkflowPostProcessor workflowPostProcessor; - private final RedissonClient redissonClient; - private final DpsClient dpsClient; - - PersistenceProvider(Channel rabbitmqPublisherChannel, Channel rabbitmqConsumerChannel, - SemaphoresPerPluginManager semaphoresPerPluginManager, - WorkflowExecutionDao workflowExecutionDao, WorkflowPostProcessor workflowPostProcessor, - RedissonClient redissonClient, DpsClient dpsClient) { - this.rabbitmqPublisherChannel = rabbitmqPublisherChannel; - this.rabbitmqConsumerChannel = rabbitmqConsumerChannel; - this.semaphoresPerPluginManager = semaphoresPerPluginManager; - this.workflowExecutionDao = workflowExecutionDao; - this.workflowPostProcessor = workflowPostProcessor; - this.redissonClient = redissonClient; - this.dpsClient = dpsClient; - } - - public SemaphoresPerPluginManager getSemaphoresPerPluginManager() { - return semaphoresPerPluginManager; - } - - WorkflowExecutionDao getWorkflowExecutionDao() { - return workflowExecutionDao; - } - - public WorkflowPostProcessor getWorkflowPostProcessor() { - return workflowPostProcessor; - } - - DpsClient getDpsClient() { - return dpsClient; - } - - RedissonClient getRedissonClient() { - return redissonClient; - } - - public Channel getRabbitmqPublisherChannel() { - return rabbitmqPublisherChannel; - } - - public 
Channel getRabbitmqConsumerChannel() { - return rabbitmqConsumerChannel; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/QueueConsumer.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/QueueConsumer.java deleted file mode 100644 index 50f455ea45..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/QueueConsumer.java +++ /dev/null @@ -1,223 +0,0 @@ -package eu.europeana.metis.core.execution; - -import com.rabbitmq.client.AMQP; -import com.rabbitmq.client.AMQP.BasicProperties; -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.DefaultConsumer; -import com.rabbitmq.client.Envelope; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Class that handles the initializing connection to the RabbitMQ distributed queue and handling the - * consuming of items from the queue, through the implemented {@link #handleDelivery(String, - * Envelope, BasicProperties, byte[])} method. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-13 - */ -public class QueueConsumer extends DefaultConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(QueueConsumer.class); - - private final WorkflowExecutionSettings workflowExecutionSettings; - private final WorkflowExecutorManager workflowExecutorManager; - private final WorkflowExecutionMonitor workflowExecutionMonitor; - - private final ExecutorService threadPool; - private final ExecutorCompletionService> completionService; - private int threadsCounter; - - /** - * Constructor with all required parameters to initialize the consumer connection to the - * distributed queue and initialize the execution pool - * - * @param rabbitmqConsumerChannel the consumer channel of the queue - * @param rabbitmqQueueName the queue name - * @param workflowExecutionSettings the object that contains execution related settings - * @param workflowExecutorManager the object that contains persistence related objects - * @param workflowExecutionMonitor the object used to monitor executions - * @throws IOException if the consumer channel initialization fails - */ - public QueueConsumer(Channel rabbitmqConsumerChannel, String rabbitmqQueueName, - WorkflowExecutionSettings workflowExecutionSettings, - WorkflowExecutorManager workflowExecutorManager, - WorkflowExecutionMonitor workflowExecutionMonitor) throws IOException { - super(workflowExecutorManager.getRabbitmqConsumerChannel()); - this.workflowExecutionSettings = workflowExecutionSettings; - this.workflowExecutorManager = workflowExecutorManager; - threadPool = Executors.newCachedThreadPool(); - completionService = new ExecutorCompletionService<>(threadPool); - this.workflowExecutionMonitor = workflowExecutionMonitor; - - // For correct priority. Keep in mind this pre-fetches a message before going into - // handleDelivery - rabbitmqConsumerChannel.basicQos(1); - // Auto acknowledge false(second parameter) because of Qos. 
- rabbitmqConsumerChannel.basicConsume(rabbitmqQueueName, false, this); - } - - /** - * Handles each consumed message from the queue. - *

- * Does not run as a thread. Each execution will run separately one after the other for each - * consumption. Make sure that if an exception occurs from mongo connections, the related - * "current" execution is safe to not be processed in this run and will be picked up on a later - * stage. See also the configuration of the related {@link com.rabbitmq.client.ConnectionFactory}. - *

- *

- * Each message consumed is a workflow execution identifier which is used to claim and retrieve a - * {@link WorkflowExecution} from the database. That workflow execution is then provided to a - * {@link WorkflowExecutor} which is a {@link java.util.concurrent.Callable} and is in turn - * submitted to the {@link ExecutorCompletionService} in this class. Every message retrieved is - * ACked and therefore removed from the queue. - *

- *

- * Cleanup and identification of submitted and finished tasks is controlled in the method {@link - * #checkAndCleanCompletionService}. This method SHOULD be ran periodically from wherever - * an instance of this class is used. It is not thread safe. - *

- * - * @param consumerTag the consumer tage - * @param rabbitmqEnvelope the rabbitmq envelope - * @param properties the queue properties - * @param body the body of the consumed message - * @throws IOException if an exception occurred while sending an ACK back to the queue - */ - @Override - public void handleDelivery(String consumerTag, Envelope rabbitmqEnvelope, - AMQP.BasicProperties properties, byte[] body) throws IOException { - String objectId = new String(body, StandardCharsets.UTF_8); - LOGGER.info("workflowExecutionId: {} - Received from queue.", objectId); - LOGGER.info("workflowExecutionId: {} - Claiming workflow execution", objectId); - Pair workflowExecutionClaimedPair = workflowExecutionMonitor - .claimExecution(objectId); - WorkflowExecution workflowExecution = workflowExecutionClaimedPair.getLeft(); - final Boolean workflowClaimed = workflowExecutionClaimedPair.getRight(); - - try { - if (workflowExecution == null) { - // This execution no longer exists and we need to ignore it. 
- LOGGER.warn("workflowExecutionId: {} - Was in queue but no longer exists.", objectId); - } else if (workflowClaimed.equals(Boolean.TRUE)) { - LOGGER.info("workflowExecutionId: {} - Claimed", workflowExecution.getId()); - handleClaimedExecution(workflowExecution); - } else if (workflowClaimed.equals(Boolean.FALSE) - && WorkflowExecutionMonitor.CLAIMABLE_STATUSES - .contains(workflowExecution.getWorkflowStatus())) { - LOGGER.info("workflowExecutionId: {} - Could not be claimed, discarding message", - workflowExecution.getId()); - } else { - LOGGER - .info("workflowExecutionId: {} - Does not have a claimable status, discarding message", - workflowExecution.getId()); - } - } finally { - sendAck(rabbitmqEnvelope, objectId); - } - } - - private void handleClaimedExecution(WorkflowExecution workflowExecution) { - try { - if (workflowExecution.isCancelling()) { - // Has been cancelled, do not execute - workflowExecution.setWorkflowAndAllQualifiedPluginsToCancelled(); - workflowExecutorManager.getWorkflowExecutionDao().update(workflowExecution); - LOGGER.info("workflowExecutionId: {} - Cancelled", workflowExecution.getId()); - } else { - submitExecution(workflowExecution); - } - } catch (RuntimeException e) { - LOGGER.error(String.format( - "workflowExecutionId: %s - Exception occurred during submitting message from queue", - workflowExecution.getId()), e); - } - } - - private void submitExecution(WorkflowExecution workflowExecution) { - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, - workflowExecutorManager, workflowExecutionSettings); - completionService.submit(workflowExecutor); - threadsCounter++; - } - - private void sendAck(Envelope rabbitmqEnvelope, String objectId) throws IOException { - // Send ACK back to remove from queue asap. 
- super.getChannel().basicAck(rabbitmqEnvelope.getDeliveryTag(), false); - LOGGER.debug("workflowExecutionId: {} - ACK sent with tag {}", objectId, - rabbitmqEnvelope.getDeliveryTag()); - } - - /** - * Polls the completion service until there isn't any result returned. - *

Based on the result returned from the completion service, this method will decide if a - * workflow execution should be sent back to the queue.

- *

This method SHOULD be ran periodically from - * wherever an instance of this class is used. It is not thread safe.

- * - * @throws InterruptedException if the execution of this method was interrupted - */ - public void checkAndCleanCompletionService() throws InterruptedException { - LOGGER.debug("Check if we have a task that has finished, threadsCounter: {}", threadsCounter); - Future> userWorkflowExecutionFuture = completionService.poll(); - while (userWorkflowExecutionFuture != null) { - threadsCounter--; - try { - checkCollectedWorkflowExecution(userWorkflowExecutionFuture.get()); - } catch (ExecutionException e) { - LOGGER.warn("Exception occurred in Future task", e); - } - userWorkflowExecutionFuture = completionService.poll(); - } - - } - - /** - * Checks if the workflow execution was run as expected. - *

- * If one of the plugins was not allowed to run therefore the workflow execution did not complete - * as a whole then we are resending the execution identifier back to the queue. If this execution - * needs to be prioritized then the priority should be updated inside the {@link - * java.util.concurrent.Callable} - *

- * - * @param workflowExecutionRanFlagPair the workflow execution future - */ - private void checkCollectedWorkflowExecution( - Pair workflowExecutionRanFlagPair) { - final WorkflowExecution workflowExecution = workflowExecutionRanFlagPair.getLeft(); - if (workflowExecution != null) { - boolean wasExecutionClaimedAndPluginRan = workflowExecutionRanFlagPair.getRight(); - //If a plugin did not run, we are sending it back to queue so another instance can pick it up - if (wasExecutionClaimedAndPluginRan) { - LOGGER.info("workflowExecutionId: {} - Task finished", workflowExecution.getId()); - } else { - LOGGER.info("workflowExecutionId: {} - Sent to queue because execution could " - + "not be claimed or plugin could not run in this instance", workflowExecution.getId()); - workflowExecutorManager.addWorkflowExecutionToQueue(workflowExecution.getId().toString(), - workflowExecution.getWorkflowPriority()); - } - } - } - - /** - * Close resources - */ - public void close() { - //Interrupt running threads - threadPool.shutdownNow(); - } - - int getThreadsCounter() { - return threadsCounter; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SchedulerExecutor.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SchedulerExecutor.java deleted file mode 100644 index e63ba7048e..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SchedulerExecutor.java +++ /dev/null @@ -1,219 +0,0 @@ -package eu.europeana.metis.core.execution; - -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.service.ScheduleWorkflowService; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.exception.GenericMetisException; -import java.time.LocalDateTime; -import java.time.ZoneId; -import 
java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.redisson.client.RedisConnectionException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Class that is responsible for scheduling executions. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-09-27 - */ -public class SchedulerExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerExecutor.class); - - private final OrchestratorService orchestratorService; - private final ScheduleWorkflowService scheduleWorkflowService; - private final RedissonClient redissonClient; - private static final String SCHEDULER_LOCK = "schedulerLock"; - private LocalDateTime lastExecutionTime = LocalDateTime.now(); - - /** - * Constructs the executor - * - * @param orchestratorService {@link OrchestratorService} - * @param scheduleWorkflowService {@link ScheduleWorkflowService} - * @param redissonClient {@link RedissonClient} - */ - public SchedulerExecutor(OrchestratorService orchestratorService, - ScheduleWorkflowService scheduleWorkflowService, RedissonClient redissonClient) { - this.orchestratorService = orchestratorService; - this.scheduleWorkflowService = scheduleWorkflowService; - this.redissonClient = redissonClient; - } - - /** - * Makes a run to check if there are executions scheduled in a range of dates and if some are - * found it will send them in the distributed queue. It is meant that this method is ran - * periodically. 
- */ - @SuppressWarnings("squid:S2222") //There is a lock.unlock() code within the `finally` code block, which will be run if an exception is thrown or not - public void performScheduling() { - RLock lock = redissonClient.getFairLock(SCHEDULER_LOCK); - try { - lock.lock(); - final LocalDateTime thisExecutionTime = LocalDateTime.now(); - LOGGER.info("Date range checking lowerbound: {}, upperBound:{}", this.lastExecutionTime, - thisExecutionTime); - List allCleanedScheduledWorkflows = - getCleanedScheduledUserWorkflows(lastExecutionTime, thisExecutionTime); - - for (ScheduledWorkflow scheduledWorkflow : allCleanedScheduledWorkflows) { - LOGGER.info("Adding ScheduledWorkflow with DatasetId: {},pointerDate: {}, frequence: {}", - scheduledWorkflow.getDatasetId(), scheduledWorkflow.getPointerDate(), - scheduledWorkflow.getScheduleFrequence()); - tryAddUserWorkflowInQueueOfUserWorkflowExecutions(scheduledWorkflow); - } - lastExecutionTime = thisExecutionTime; - } catch (RuntimeException e) { - LOGGER.warn( - "Exception thrown from rabbitmq channel or Redis disconnection, scheduler thread continues", - e); - } finally { - try { - lock.unlock(); - } catch (RedisConnectionException e) { - LOGGER.warn("Cannot connect to unlock, scheduler thread continues", e); - } - } - } - - private List getCleanedScheduledUserWorkflows(LocalDateTime lowerBound, - LocalDateTime upperBound) { - List scheduledWorkflows = new ArrayList<>(); - scheduledWorkflows.addAll(getScheduledUserWorkflowsFrequenceOnce(lowerBound, upperBound)); - scheduledWorkflows.addAll(getScheduledUserWorkflowsFrequenceDaily(lowerBound, upperBound)); - scheduledWorkflows.addAll(getScheduledUserWorkflowsFrequenceWeekly(lowerBound, upperBound)); - scheduledWorkflows - .addAll(getScheduledUserWorkflowsFrequenceMonthly(lowerBound, upperBound)); - return scheduledWorkflows; - } - - private List getScheduledUserWorkflowsFrequenceOnce(LocalDateTime lowerBound, - LocalDateTime upperBound) { - int nextPage = 0; - List 
scheduledWorkflows = new ArrayList<>(); - ResponseListWrapper scheduledUserWorkflowResponseListWrapper = new ResponseListWrapper<>(); - do { - scheduledUserWorkflowResponseListWrapper.clear(); - scheduledUserWorkflowResponseListWrapper - .setResultsAndLastPage(scheduleWorkflowService - .getAllScheduledWorkflowsByDateRangeONCE(lowerBound, upperBound, nextPage), - scheduleWorkflowService.getScheduledWorkflowsPerRequest(), nextPage); - scheduledWorkflows - .addAll(scheduledUserWorkflowResponseListWrapper.getResults()); - nextPage = scheduledUserWorkflowResponseListWrapper.getNextPage(); - } while (nextPage != -1); - return scheduledWorkflows; - } - - private List getScheduledUserWorkflowsFrequenceDaily(LocalDateTime lowerBound, - LocalDateTime upperBound) { - List scheduledWorkflows = getScheduledUserWorkflows( - ScheduleFrequence.DAILY); - for (Iterator iterator = scheduledWorkflows.iterator(); - iterator.hasNext(); ) { - ScheduledWorkflow scheduledWorkflow = iterator.next(); - - LocalDateTime pointerDate = LocalDateTime - .ofInstant(scheduledWorkflow.getPointerDate().toInstant(), ZoneId.systemDefault()); - LocalDateTime localDateToCheck = lowerBound.withYear(lowerBound.getYear()) - .withMonth(lowerBound.getMonthValue()).withHour(pointerDate.getHour()) - .withMinute(pointerDate.getMinute()).withSecond(pointerDate.getSecond()) - .withNano(pointerDate.getNano()); - - if (localDateToCheck.isBefore(lowerBound) || localDateToCheck.isEqual(upperBound) - || localDateToCheck.isAfter(upperBound)) { - iterator.remove(); - } - } - return scheduledWorkflows; - } - - private List getScheduledUserWorkflowsFrequenceWeekly(LocalDateTime lowerBound, - LocalDateTime upperBound) { - List scheduledWorkflows = getScheduledUserWorkflows( - ScheduleFrequence.WEEKLY); - for (Iterator scheduledWorkflowIterator = scheduledWorkflows.iterator(); - scheduledWorkflowIterator.hasNext(); ) { - LocalDateTime localDateToCheck = getLocalDateTimeBasedOnLowerBound(lowerBound, - 
scheduledWorkflowIterator.next()); - - if (lowerBound.getDayOfWeek() == localDateToCheck.getDayOfWeek()) { - localDateToCheck = localDateToCheck.withDayOfMonth(lowerBound.getDayOfMonth()); - } - - if (localDateToCheck.isBefore(lowerBound) || localDateToCheck.isEqual(upperBound) - || localDateToCheck.isAfter(upperBound)) { - scheduledWorkflowIterator.remove(); - } - } - return scheduledWorkflows; - } - - private List getScheduledUserWorkflowsFrequenceMonthly( - LocalDateTime lowerBound, LocalDateTime upperBound) { - List scheduledWorkflows = getScheduledUserWorkflows( - ScheduleFrequence.MONTHLY); - for (Iterator scheduledWorkflowIterator = scheduledWorkflows.iterator(); - scheduledWorkflowIterator.hasNext(); ) { - LocalDateTime localDateToCheck = getLocalDateTimeBasedOnLowerBound(lowerBound, - scheduledWorkflowIterator.next()); - - if (lowerBound.getDayOfMonth() == localDateToCheck.getDayOfMonth()) { - localDateToCheck = localDateToCheck.withMonth(lowerBound.getMonthValue()); - } - - if (localDateToCheck.isBefore(lowerBound) || localDateToCheck.isEqual(upperBound) - || localDateToCheck.isAfter(upperBound)) { - scheduledWorkflowIterator.remove(); - } - } - return scheduledWorkflows; - } - - private LocalDateTime getLocalDateTimeBasedOnLowerBound(LocalDateTime lowerBound, - ScheduledWorkflow scheduledWorkflow) { - LocalDateTime pointerDate = LocalDateTime - .ofInstant(scheduledWorkflow.getPointerDate().toInstant(), ZoneId.systemDefault()); - return lowerBound.withYear(lowerBound.getYear()) - .withMonth(pointerDate.getMonthValue()).withDayOfMonth(pointerDate.getDayOfMonth()) - .withHour(pointerDate.getHour()) - .withMinute(pointerDate.getMinute()).withSecond(pointerDate.getSecond()) - .withNano(pointerDate.getNano()); - } - - private List getScheduledUserWorkflows( - ScheduleFrequence scheduleFrequence) { - int nextPage = 0; - List scheduledWorkflows = new ArrayList<>(); - ResponseListWrapper scheduledUserWorkflowResponseListWrapper = new ResponseListWrapper<>(); - do 
{ - scheduledUserWorkflowResponseListWrapper.clear(); - scheduledUserWorkflowResponseListWrapper - .setResultsAndLastPage(scheduleWorkflowService - .getAllScheduledWorkflowsWithoutAuthorization(scheduleFrequence, nextPage), - scheduleWorkflowService.getScheduledWorkflowsPerRequest(), nextPage); - scheduledWorkflows - .addAll(scheduledUserWorkflowResponseListWrapper.getResults()); - nextPage = scheduledUserWorkflowResponseListWrapper.getNextPage(); - } while (nextPage != -1); - return scheduledWorkflows; - } - - private void tryAddUserWorkflowInQueueOfUserWorkflowExecutions( - ScheduledWorkflow scheduledWorkflow) { - try { - orchestratorService.addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization( - scheduledWorkflow.getDatasetId(), null, null, scheduledWorkflow.getWorkflowPriority()); - } catch (GenericMetisException e) { - LOGGER.warn("Scheduled execution was not added to queue", e); - } - } -} - diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SemaphoresPerPluginManager.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SemaphoresPerPluginManager.java deleted file mode 100644 index 2af0fcc2e9..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/SemaphoresPerPluginManager.java +++ /dev/null @@ -1,50 +0,0 @@ -package eu.europeana.metis.core.execution; - -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import java.util.Collections; -import java.util.EnumMap; -import java.util.Map; -import java.util.concurrent.Semaphore; - -/** - * Manages a map of {@link ExecutablePluginType} keys and {@link Semaphore} values. - *

Each executable plugin type contains it's own semaphore so that access to those plugin - * types is controlled.

- */ -public class SemaphoresPerPluginManager { - - private final Map unmodifiableMaxThreadsPerPlugin; - - /** - * Constructor that initializes the map with provided number of permissions on the semaphores. - * - * @param permissionsPerSemaphore the permissions for each semaphore - */ - public SemaphoresPerPluginManager(int permissionsPerSemaphore) { - Map maxThreadsPerPlugin = new EnumMap<>( - ExecutablePluginType.class); - for (ExecutablePluginType executablePluginType : ExecutablePluginType.values()) { - maxThreadsPerPlugin.put(executablePluginType, new Semaphore(permissionsPerSemaphore, true)); - } - this.unmodifiableMaxThreadsPerPlugin = Collections.unmodifiableMap(maxThreadsPerPlugin); - } - - /** - * Try acquisition of a semaphore for a provided {@link ExecutablePluginType}. - * - * @param executablePluginType the provided executable plugin type - * @return true if acquisition was successful, false otherwise - */ - public boolean tryAcquireForExecutablePluginType(ExecutablePluginType executablePluginType) { - return unmodifiableMaxThreadsPerPlugin.get(executablePluginType).tryAcquire(); - } - - /** - * Release a permission for a semaphore by {@link ExecutablePluginType}. 
- * - * @param executablePluginType the executable plugin type to release the permission from - */ - public void releaseForPluginType(ExecutablePluginType executablePluginType) { - unmodifiableMaxThreadsPerPlugin.get(executablePluginType).release(); - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionMonitor.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionMonitor.java deleted file mode 100644 index 9baaddc457..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionMonitor.java +++ /dev/null @@ -1,313 +0,0 @@ -package eu.europeana.metis.core.execution; - -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import java.time.Duration; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.redisson.client.RedisConnectionException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class monitors workflow executions. It provides functionality that determines whether a - * running execution is progressing (as opposed to hanging) as well as functionality to deal with - * this eventuality. Two of its methods are meant to be scheduled for a periodical run. 
- */ -public class WorkflowExecutionMonitor { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutionMonitor.class); - - private static final String FAILSAFE_LOCK = "failsafeLock"; - protected static final Set CLAIMABLE_STATUSES = EnumSet - .of(WorkflowStatus.INQUEUE, WorkflowStatus.RUNNING); - - private final WorkflowExecutionDao workflowExecutionDao; - private final WorkflowExecutorManager workflowExecutorManager; - private final Duration failsafeLeniency; - private final RedissonClient redissonClient; - - /** - * The currently running executions. - **/ - private Map currentRunningExecutions = Collections.emptyMap(); - - /** - * Constructor the executor - * - * @param workflowExecutorManager {@link WorkflowExecutorManager} - * @param workflowExecutionDao {@link WorkflowExecutionDao} - * @param redissonClient {@link RedissonClient} - * @param failsafeLeniency The leniency given to executions to be idle. - */ - public WorkflowExecutionMonitor(WorkflowExecutorManager workflowExecutorManager, - WorkflowExecutionDao workflowExecutionDao, RedissonClient redissonClient, - Duration failsafeLeniency) { - this.failsafeLeniency = failsafeLeniency; - this.workflowExecutionDao = workflowExecutionDao; - this.workflowExecutorManager = workflowExecutorManager; - this.redissonClient = redissonClient; - } - - /* DO NOT CALL THIS METHOD WITHOUT POSSESSING THE LOCK */ - List updateCurrentRunningExecutions() { - - // Get all workflow executions that are currently running - final List allRunningWorkflowExecutions = getWorkflowExecutionsWithStatus( - WorkflowStatus.RUNNING); - - // Go by all running executions and compare them with the data we already have. 
- final Map newExecutions = new HashMap<>( - allRunningWorkflowExecutions.size()); - for (WorkflowExecution execution : allRunningWorkflowExecutions) { - final WorkflowExecutionEntry currentEntry = getEntry(execution); - final WorkflowExecutionEntry newEntry; - if (currentEntry != null && currentEntry.updateTimeValueIsEqual(execution.getUpdatedDate())) { - // If the known update time has not changed, we keep the entry (the - // timeOfLastUpdateTimeChange property should not change). - newEntry = currentEntry; - } else { - // If we find a change of the known update time, we make a new entry with a new - // timeOfLastUpdateTimeChange value). - newEntry = new WorkflowExecutionEntry(execution.getUpdatedDate()); - } - newExecutions.put(execution.getId().toString(), newEntry); - } - currentRunningExecutions = Collections.unmodifiableMap(newExecutions); - - // Done: return all currently running executions. - return allRunningWorkflowExecutions; - } - - /** - * Makes a run to check if there are running executions hanging and if some are found it will - * re-send them in the distributed queue. To be safe (in case of the queue crashing) we also send - * all executions that are marked as being in the queue to the queue again. This method is meant - * to run periodically. - */ - @SuppressWarnings("squid:S2222") //There is a lock.unlock() code within the `finally` code block, which will be run if an exception is thrown or not - public void performFailsafe() { - RLock lock = redissonClient.getFairLock(FAILSAFE_LOCK); - try { - // Lock for the duration of this scheduled task - lock.lock(); - // Update the execution times. This way we always have the latest values. - final List allRunningWorkflowExecutions = updateCurrentRunningExecutions(); - - // Determine which running executions appear to be hanging. Those we requeue. If an execution - // is running but there is no entry, requeue it just to be safe (this can't happen). 
- final List toBeRequeued = new ArrayList<>(); - for (WorkflowExecution runningExecution : allRunningWorkflowExecutions) { - final WorkflowExecutionEntry executionEntry = getEntry(runningExecution); - if (executionEntry == null || executionEntry.assumeHanging(failsafeLeniency)) { - toBeRequeued.add(runningExecution); - } - } - - // Get all workflow executions that are currently in queue - they are all to be requeued. - toBeRequeued.addAll(getWorkflowExecutionsWithStatus(WorkflowStatus.INQUEUE)); - - // Requeue executions. - for (WorkflowExecution workflowExecution : toBeRequeued) { - workflowExecutorManager.addWorkflowExecutionToQueue(workflowExecution.getId().toString(), - workflowExecution.getWorkflowPriority()); - } - } catch (RuntimeException e) { - LOGGER.warn( - "Exception thrown from rabbitmq channel or Redis disconnection, failsafe thread continues", - e); - } finally { - try { - lock.unlock(); - } catch (RedisConnectionException e) { - LOGGER.warn("Cannot connect to unlock, failsafe thread continues", e); - } - } - } - - /* DO NOT CALL THIS METHOD WITHOUT POSSESSING THE LOCK */ - List getWorkflowExecutionsWithStatus(WorkflowStatus workflowStatus) { - - // Get all the executions, using paging. - final List workflowExecutions = new ArrayList<>(); - int nextPage = 0; - ResponseListWrapper userWorkflowExecutionResponseListWrapper = new ResponseListWrapper<>(); - do { - userWorkflowExecutionResponseListWrapper.clear(); - final ResultList result = workflowExecutionDao - .getAllWorkflowExecutions(null, EnumSet.of(workflowStatus), DaoFieldNames.ID, true, - nextPage, 1, true); - userWorkflowExecutionResponseListWrapper.setResultsAndLastPage(result.getResults(), - workflowExecutionDao.getWorkflowExecutionsPerRequest(), nextPage, - result.isMaxResultCountReached()); - workflowExecutions.addAll(userWorkflowExecutionResponseListWrapper.getResults()); - nextPage = userWorkflowExecutionResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - // Done. 
- return workflowExecutions; - } - - /** - * This method determines whether a workflow execution may be started. Executions in queue may - * always be started. Running executions are granted or denied permission according to the - * following rules: - *
    - *
  1. If we know a last change time and it is recent, we assume that a process is already working - * on this permission and permission is denied.
  2. - *
  3. If we know a last change time and it is old, but the database has a different update time, - * permission is denied as this signifies a change (which will be captured by the next run of the - * monitor).
  4. - *
  5. If we don't have a monitor last change time, we assume that this will appear shortly and we - * postpone the decision (by denying it now). This shouldn't happen.
  6. - *
  7. In all other cases permission is granted: the execution is determined to be hanging.
  8. - *
- * - * @param workflowExecutionId The ID of the workflow execution which the caller wishes to claim. - * @return A recent version of the workflow execution if the claim is granted. Null if the claim - * is denied. - */ - public Pair claimExecution(String workflowExecutionId) { - - WorkflowExecution workflowExecution; - boolean claimed = false; - RLock lock = redissonClient.getFairLock(FAILSAFE_LOCK); - // Lock for the duration of this request - lock.lock(); - try { - // Retrieve the most current version of the execution. - workflowExecution = workflowExecutionDao.getById(workflowExecutionId); - - if (workflowExecution != null) { - claimed = mayClaimExecution(workflowExecution); - if (claimed) { - updateClaimedExecution(workflowExecution); - } - } - - } catch (RuntimeException e) { - LOGGER.warn("Exception thrown while claiming workflow execution.", e); - workflowExecution = null; - } finally { - lock.unlock(); - } - return new ImmutablePair<>(workflowExecution, claimed); - } - - private void updateClaimedExecution(WorkflowExecution workflowExecution) { - //Update dates - final Date now = new Date(); - workflowExecution.setUpdatedDate(now); - if (workflowExecution.getWorkflowStatus() != WorkflowStatus.RUNNING) { - workflowExecution.setStartedDate(now); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - } - workflowExecutionDao.updateMonitorInformation(workflowExecution); - } - - /* DO NOT CALL THIS METHOD WITHOUT POSSESSING THE LOCK */ - boolean mayClaimExecution(WorkflowExecution workflowExecution) { - - if (CLAIMABLE_STATUSES.contains(workflowExecution.getWorkflowStatus())) { - //If it's INQUEUE we are directly claiming it - if (workflowExecution.getWorkflowStatus() == WorkflowStatus.INQUEUE) { - return true; - } - - // If it is running, we check whether it is currently hanging. Get the map entry. 
- final WorkflowExecutionEntry currentExecution = getEntry(workflowExecution); - - // If there is no entry, permission is denied: we assume one will appear shortly. - if (currentExecution == null) { - LOGGER.info( - "workflowExecutionId: {} - Claim denied: wait for scheduled monitoring task to monitor this RUNNING execution.", - workflowExecution.getId()); - return false; - } - - // Grant permission only if the execution appears to be hanging. - final boolean isExecutionHanging = - currentExecution.updateTimeValueIsEqual(workflowExecution.getUpdatedDate()) - && currentExecution.assumeHanging(failsafeLeniency); - if (!isExecutionHanging) { - LOGGER.info( - "workflowExecutionId: {} - Claim denied: {} execution does not (yet) appear to be " - + "hanging.", workflowExecution.getId(), WorkflowStatus.RUNNING); - } - return isExecutionHanging; - } - LOGGER.info("workflowExecutionId: {} - Claim denied: workflow not in {} or {} state.", - workflowExecution.getId(), WorkflowStatus.RUNNING, WorkflowStatus.INQUEUE); - return false; - } - - /* DO NOT CALL THIS METHOD WITHOUT POSSESSING THE LOCK */ - WorkflowExecutionEntry getEntry(WorkflowExecution workflowExecution) { - return currentRunningExecutions.get(workflowExecution.getId().toString()); - } - - static class WorkflowExecutionEntry { - - /** - * This is the date that other core instances may provide. Should be treated as a version - * number: no time calculations should be done with this as the clock may differ from ours. - **/ - private final Instant executionUpdateTime; - - /** - * This is the date on this machine. Can be treated as a time. - **/ - private final Instant timeOfLastUpdateTimeChange; - - public WorkflowExecutionEntry(Date updateTime) { - this.executionUpdateTime = updateTime == null ? null : updateTime.toInstant(); - this.timeOfLastUpdateTimeChange = Instant.now(); - } - - /** - * Determines whether the given update time is equal to the one we know. 
- * - * @param otherUpdateTime the update time to compare. - * @return Whether it is equal to the one we have in the entry. - */ - public boolean updateTimeValueIsEqual(Date otherUpdateTime) { - Instant otherInstant = otherUpdateTime == null ? null : otherUpdateTime.toInstant(); - return Objects.equals(otherInstant, executionUpdateTime); - } - - /** - * Determines whether this workflow execution is hanging according to the given leniency. It is - * assumed to be hanging if we obtained the last update more than the leniency period ago. - * - * @param leniency The leniency with which to decide whether the execution is hanging. - * @return Whether or not the execution is assumed to be hanging. - */ - public boolean assumeHanging(Duration leniency) { - return getLastValueChange().plus(leniency).isBefore(getNow()); - } - - public Instant getLastValueChange() { - return timeOfLastUpdateTimeChange; - } - - Instant getNow() { - return Instant.now(); - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionSettings.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionSettings.java deleted file mode 100644 index 50b05bb364..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutionSettings.java +++ /dev/null @@ -1,22 +0,0 @@ -package eu.europeana.metis.core.execution; - -import eu.europeana.metis.core.workflow.plugins.ThrottlingValues; - -/** - * These are settings that are all related to the actual execution of workflows, and used mostly by - * the classes {@link WorkflowExecutor} and {@link QueueConsumer}. 
- */ -interface WorkflowExecutionSettings { - - int getDpsMonitorCheckIntervalInSecs(); - - int getPeriodOfNoProcessedRecordsChangeInMinutes(); - - String getEcloudBaseUrl(); - - String getEcloudProvider(); - - String getMetisCoreBaseUrl(); - - ThrottlingValues getThrottlingValues(); -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutor.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutor.java deleted file mode 100644 index 6fe32471a5..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutor.java +++ /dev/null @@ -1,554 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static java.lang.Thread.currentThread; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.dps.TaskState; -import eu.europeana.cloud.service.dps.exception.DpsException; -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.ExecutedMetisPluginId; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.exceptions.InvalidIndexPluginException; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractIndexPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DpsTaskSettings; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin.MonitorResult; -import 
eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.ThrottlingValues; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.UnrecoverableExternalTaskException; -import java.util.Date; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is a {@link Callable} class that accepts a {@link WorkflowExecution}. It starts that - * WorkflowExecution given to it and will continue monitoring and updating its progress until it - * ends either by user interaction or by the end of the Workflow. When the WorkflowExecution is - * received there is a chance that the execution is already being handled from another - * WorkflowExecutor in another instance and if that is the case the WorkflowExecution will be - * dropped. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-29 - */ -public class WorkflowExecutor implements Callable> { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutor.class); - private static final String EXECUTION_ERROR_PREFIX = "Execution of external task presented with an error. "; - private static final String MONITOR_ERROR_PREFIX = "An error occurred while monitoring the external task. 
"; - private static final String POSTPROCESS_ERROR_PREFIX = "An error occurred while post-processing the external task. "; - private static final String TRIGGER_ERROR_PREFIX = "An error occurred while triggering the external task. "; - private static final String DETAILED_EXCEPTION_FORMAT = "%s%nDetailed exception:%s"; - - protected static final int MAX_CANCEL_OR_MONITOR_FAILURES = 10; - - private final SemaphoresPerPluginManager semaphoresPerPluginManager; - private final WorkflowExecutionDao workflowExecutionDao; - private final WorkflowPostProcessor workflowPostProcessor; - private final int monitorCheckIntervalInSecs; - private final long periodOfNoProcessedRecordsChangeInSeconds; - private final DpsClient dpsClient; - private final String ecloudBaseUrl; - private final String ecloudProvider; - private final String metisCoreBaseUrl; - private WorkflowExecution workflowExecution; - private final ThrottlingValues throttlingValues; - - WorkflowExecutor(WorkflowExecution workflowExecution, PersistenceProvider persistenceProvider, - WorkflowExecutionSettings workflowExecutionSettings) { - this.workflowExecution = workflowExecution; - this.semaphoresPerPluginManager = persistenceProvider.getSemaphoresPerPluginManager(); - this.workflowExecutionDao = persistenceProvider.getWorkflowExecutionDao(); - this.workflowPostProcessor = persistenceProvider.getWorkflowPostProcessor(); - this.dpsClient = persistenceProvider.getDpsClient(); - this.monitorCheckIntervalInSecs = workflowExecutionSettings.getDpsMonitorCheckIntervalInSecs(); - this.periodOfNoProcessedRecordsChangeInSeconds = TimeUnit.MINUTES - .toSeconds(workflowExecutionSettings.getPeriodOfNoProcessedRecordsChangeInMinutes()); - this.ecloudBaseUrl = workflowExecutionSettings.getEcloudBaseUrl(); - this.ecloudProvider = workflowExecutionSettings.getEcloudProvider(); - this.metisCoreBaseUrl = workflowExecutionSettings.getMetisCoreBaseUrl(); - this.throttlingValues = workflowExecutionSettings.getThrottlingValues(); - } - 
- @Override - public Pair call() { - // Perform the work - run the workflow. - LOGGER.info("workflowExecutionId: {}, priority {} - Starting workflow execution", - workflowExecution.getId(), workflowExecution.getWorkflowPriority()); - final Pair didPluginRunDatePair = runInqueueOrRunningStateWorkflowExecution(); - final Date finishDate = didPluginRunDatePair.getLeft(); - final Boolean didPluginsRun = didPluginRunDatePair.getRight(); - - // Process the results if we were not interrupted - if (!currentThread().isInterrupted()) { - if (finishDate == null && workflowExecutionDao.isCancelling(workflowExecution.getId())) { - // If the workflow was cancelled before it had the chance to finish, we cancel all remaining - // plugins. - workflowExecution.setWorkflowAndAllQualifiedPluginsToCancelled(); - // Make sure the cancelledBy information is not lost - String cancelledBy = workflowExecutionDao.getById(workflowExecution.getId().toString()) - .getCancelledBy(); - workflowExecution.setCancelledBy(cancelledBy); - LOGGER.info("workflowExecutionId: {} - Cancelled running workflow execution", - workflowExecution.getId()); - } else if (finishDate == null && didPluginsRun) { - // One plugin failed - workflowExecution.checkAndSetAllRunningAndInqueuePluginsToCancelledIfOnePluginHasFailed(); - } else if (finishDate == null) { - // A plugin was not allowed to run because of no slot space - // Increase priority for this execution - workflowExecution.setWorkflowPriority(workflowExecution.getWorkflowPriority() + 1); - LOGGER.info("workflowExecution: {} - Stop workflow execution a plugin was not allowed to " - + "run(priority increased)", workflowExecution.getId()); - } else { - // If the workflow finished successfully, we record this. 
- workflowExecution.setFinishedDate(finishDate); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecution.setCancelling(false); - LOGGER.info("workflowExecutionId: {} - Finished workflow execution", - workflowExecution.getId()); - } - } - - // The only full update is used here. The rest of the execution uses partial updates to avoid - // losing the cancelling state field - workflowExecutionDao.update(workflowExecution); - return new ImmutablePair<>(workflowExecution, didPluginsRun); - } - - /** - * Will determine from which plugin of the workflow to start execution from and will iterate - * through the plugins of the workflow and run them one by one. - *

It returns a {@link Pair} of a finished {@link Date} and a {@link Boolean} flag. - *

    - *
  • - * The Date represents the finished date of the workflow or null if it did not finish - * as expected. That can happen if an error occurred or some plugin was not permitted to - * run. - *
  • - *
  • - * The Boolean flag represents true if all plugins were allowed to run or false if one of - * the plugins was not allowed to run. - *
  • - *
- *

- * - * @return The pair of date and boolean flag - */ - private Pair runInqueueOrRunningStateWorkflowExecution() { - - // Find the first plugin to continue execution from - int firstPluginPositionToStart = getFirstPluginPositionToStart(); - - boolean didPluginRun = true; - boolean continueNextPlugin = true; - List metisPlugins = workflowExecution.getMetisPlugins(); - // One by one start the plugins of the workflow - for (int i = firstPluginPositionToStart; - i < metisPlugins.size() && continueNextPlugin; i++) { - final AbstractMetisPlugin plugin = metisPlugins.get(i); - - //Run plugin if available space - didPluginRun = runMetisPluginWithSemaphoreAllocation(i, plugin); - continueNextPlugin = !currentThread().isInterrupted() && didPluginRun && ( - !workflowExecutionDao.isCancelling(workflowExecution.getId()) - || plugin.getFinishedDate() != null) - && plugin.getPluginStatus() != PluginStatus.FAILED; - } - - // Compute the finished date - final AbstractMetisPlugin lastPlugin = metisPlugins.get(metisPlugins.size() - 1); - final Date finishDate; - if (lastPlugin.getPluginStatus() == PluginStatus.FINISHED) { - finishDate = lastPlugin.getFinishedDate(); - } else { - finishDate = null; - } - return new ImmutablePair<>(finishDate, didPluginRun); - } - - private int getFirstPluginPositionToStart() { - int firstPluginPositionToStart = 0; - List metisPlugins = workflowExecution.getMetisPlugins(); - for (int i = 0; i < metisPlugins.size(); i++) { - AbstractMetisPlugin metisPlugin = metisPlugins.get(i); - if (metisPlugin.getPluginStatus() == PluginStatus.INQUEUE - || metisPlugin.getPluginStatus() == PluginStatus.RUNNING - || metisPlugin.getPluginStatus() == PluginStatus.CLEANING - || metisPlugin.getPluginStatus() == PluginStatus.PENDING - || metisPlugin.getPluginStatus() == PluginStatus.IDENTIFYING_DELETED_RECORDS) { - firstPluginPositionToStart = i; - break; - } - } - return firstPluginPositionToStart; - } - - /** - * Tries to acquire a semaphore permission corresponding to 
the provided plugin's type. - *
    - *
  1. If semaphore permission granted then there is space for that plugin and the plugin - * starts
  2. - *
  3. If semaphore permission NOT granted then the plugin din not run and a false flag is - * send back as a return result
  4. - *
- * - * @param i the index of the plugin in the list of plugins inside the workflow execution - * @param plugin the provided plugin to be ran - * @return true if plugin ran, false if plugin did not run - */ - private boolean runMetisPluginWithSemaphoreAllocation(int i, AbstractMetisPlugin plugin) { - // Sanity check - if (plugin == null) { - throw new IllegalStateException("Plugin cannot be null."); - } - // Check the plugin: it has to be executable - AbstractExecutablePlugin executablePlugin = expectExecutablePlugin(plugin); - - final ExecutablePluginType executablePluginType = ExecutablePluginType - .getExecutablePluginFromPluginType(executablePlugin.getPluginType()); - if (executablePluginType == null) { - throw new IllegalStateException("Plugin type cannot be null."); - } - - //Try acquire semaphore and run plugin. Don't forget to release - boolean acquired = semaphoresPerPluginManager - .tryAcquireForExecutablePluginType(executablePluginType); - if (acquired) { - try { - LOGGER.debug("workflowExecutionId: {}, executablePluginType: {} - Acquired semaphore", - workflowExecution.getId(), executablePluginType); - final Date startDateToUse = i == 0 ? workflowExecution.getStartedDate() : new Date(); - runMetisPlugin(executablePlugin, startDateToUse, workflowExecution.getDatasetId()); - } finally { - semaphoresPerPluginManager.releaseForPluginType(executablePluginType); - LOGGER.debug("workflowExecutionId: {}, executablePluginType: {} - Released semaphore", - workflowExecution.getId(), executablePluginType); - } - } else { - // Rest workflow execution to INQUEUE so that it can be reclaimed - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecutionDao.updateMonitorInformation(workflowExecution); - } - return acquired; - } - - /** - * It will prepare the plugin, request the external execution and will periodically monitor, - * update the plugin's progress and at the end finalize the plugin's status and finished date. 
- * - * @param plugin the plugin to run - * @param startDateToUse The date that should be used as start date (if the plugin is not already - * running). - * @param datasetId The dataset ID. - */ - private void runMetisPlugin(AbstractExecutablePlugin plugin, Date startDateToUse, - String datasetId) { - try { - // Compute previous plugin revision information. Only need to look within the workflow: when - // scheduling the workflow, the previous plugin information is set for the first plugin. - final AbstractExecutablePluginMetadata metadata = plugin.getPluginMetadata(); - final ExecutedMetisPluginId executedMetisPluginId = ExecutedMetisPluginId - .forPredecessor(plugin); - if (executedMetisPluginId == null) { - final ExecutablePlugin predecessor = DataEvolutionUtils - .computePredecessorPlugin(metadata.getExecutablePluginType(), workflowExecution); - if (predecessor != null) { - metadata.setPreviousRevisionInformation(predecessor); - // Save so that we can use it below to find the root ancestor. - workflowExecutionDao.updateWorkflowPlugins(workflowExecution); - } - } - - // Compute base harvesting plugin information. We can't do this when creating the workflow - // execution: the harvest might be part of this very workflow. 
- if (DataEvolutionUtils.getIndexPluginGroup() - .contains(plugin.getPluginMetadata().getExecutablePluginType())) { - final PluginWithExecutionId rootAncestor = new DataEvolutionUtils( - workflowExecutionDao).getRootAncestor( - new PluginWithExecutionId<>(workflowExecution, plugin)); - setHarvestParametersToIndexingPlugin(plugin, rootAncestor.getPlugin()); - } - - // Start execution if it has not already started - if (StringUtils.isEmpty(plugin.getExternalTaskId())) { - if (plugin.getPluginStatus() == PluginStatus.INQUEUE) { - plugin.setStartedDate(startDateToUse); - } - final DpsTaskSettings dpsTaskSettings = new DpsTaskSettings( - ecloudBaseUrl, ecloudProvider, workflowExecution.getEcloudDatasetId(), - getExternalTaskIdOfPreviousPlugin(metadata), metisCoreBaseUrl, throttlingValues); - plugin - .execute(workflowExecution.getDatasetId(), dpsClient, dpsTaskSettings); - } - } catch (ExternalTaskException | RuntimeException e) { - LOGGER.warn(String.format("workflowExecutionId: %s, pluginType: %s - Execution of plugin " - + "failed", workflowExecution.getId(), plugin.getPluginType()), e); - plugin.setFinishedDate(null); - plugin.setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - plugin.setFailMessage(String.format(DETAILED_EXCEPTION_FORMAT, TRIGGER_ERROR_PREFIX, - ExceptionUtils.getStackTrace(e))); - return; - } finally { - workflowExecutionDao.updateWorkflowPlugins(workflowExecution); - } - - // Start periodical check and wait for plugin to be done - long sleepTime = TimeUnit.SECONDS.toMillis(monitorCheckIntervalInSecs); - periodicCheckingLoop(sleepTime, plugin, datasetId); - } - - private void setHarvestParametersToIndexingPlugin(ExecutablePlugin indexingPlugin, - ExecutablePlugin harvestPlugin) { - - // Check the harvesting types - if (!DataEvolutionUtils.getHarvestPluginGroup() - .contains(harvestPlugin.getPluginMetadata().getExecutablePluginType())) { - throw new IllegalStateException(String.format( - "workflowExecutionId: %s, pluginId: %s - Found plugin 
root that is not a harvesting plugin.", - workflowExecution.getId(), indexingPlugin.getId())); - } - - // get the information from the harvesting plugin. - final boolean incrementalHarvest = - harvestPlugin.getPluginMetadata() instanceof AbstractHarvestPluginMetadata abstractHarvestPluginMetadata - && abstractHarvestPluginMetadata.isIncrementalHarvest(); - final Date harvestDate = harvestPlugin.getStartedDate(); - - // Set the information to the indexing plugin. - if (indexingPlugin.getPluginMetadata() instanceof AbstractIndexPluginMetadata abstractIndexPluginMetadata) { - abstractIndexPluginMetadata.setIncrementalIndexing(incrementalHarvest); - abstractIndexPluginMetadata.setHarvestDate(harvestDate); - } - } - - private String getExternalTaskIdOfPreviousPlugin(AbstractExecutablePluginMetadata metadata) { - - // Get the previous plugin parameters from the plugin - if there is none, we are done. - final ExecutedMetisPluginId predecessorPlugin = ExecutedMetisPluginId.forPredecessor(metadata); - if (predecessorPlugin == null) { - return null; - } - - // Get the previous plugin based on the parameters. 
- final WorkflowExecution previousExecution = workflowExecutionDao - .getByTaskExecution(predecessorPlugin, workflowExecution.getDatasetId()); - return Optional.ofNullable(previousExecution) - .flatMap(execution -> execution.getMetisPluginWithType(predecessorPlugin.getPluginType())) - .map(this::expectExecutablePlugin).map(AbstractExecutablePlugin::getExternalTaskId) - .orElse(null); - } - - private AbstractExecutablePlugin expectExecutablePlugin(AbstractMetisPlugin plugin) { - if (plugin == null) { - return null; - } - - if (plugin instanceof AbstractExecutablePlugin abstractExecutablePlugin) { - return abstractExecutablePlugin; - } - throw new IllegalStateException(String.format( - "workflowExecutionId: %s, pluginId: %s - Found plugin that is not an executable plugin.", - workflowExecution.getId(), plugin.getId())); - } - - private void periodicCheckingLoop(long sleepTime, AbstractExecutablePlugin plugin, - String datasetId) { - MonitorResult monitorResult = null; - int consecutiveCancelOrMonitorFailures = 0; - AtomicBoolean externalCancelCallSent = new AtomicBoolean(false); - AtomicInteger previousProcessedRecords = new AtomicInteger(0); - AtomicLong checkPointDateOfProcessedRecordsPeriodInMillis = new AtomicLong( - System.currentTimeMillis()); - do { - try { - Thread.sleep(sleepTime); - // Check if the task is cancelling and send the external cancelling call if needed - sendExternalCancelCallIfNeeded(externalCancelCallSent, plugin, previousProcessedRecords, - checkPointDateOfProcessedRecordsPeriodInMillis); - monitorResult = plugin.monitor(dpsClient); - consecutiveCancelOrMonitorFailures = 0; - - if (monitorResult.getTaskState() == TaskState.REMOVING_FROM_SOLR_AND_MONGO || - isIndexingInPostProcessing(monitorResult, plugin)) { - plugin.setPluginStatusAndResetFailMessage(PluginStatus.CLEANING); - - } else if (isHarvestingInPostProcessing(monitorResult, plugin)) { - plugin.setPluginStatusAndResetFailMessage(PluginStatus.IDENTIFYING_DELETED_RECORDS); - - } else { 
- plugin.setPluginStatusAndResetFailMessage(PluginStatus.RUNNING); - } - - } catch (InterruptedException e) { - LOGGER.warn(String.format( - "workflowExecutionId: %s, pluginType: %s - Thread was interrupted during monitoring of external task", - workflowExecution.getId(), plugin.getPluginType()), e); - currentThread().interrupt(); - return; - } catch (UnrecoverableExternalTaskException e) { - LOGGER.warn(String - .format("workflowExecutionId: %s, pluginType: %s - UnrecoverableExternalTaskException" - + " occurred. Setting task state failed ", workflowExecution.getId(), plugin.getPluginType()), e); - // Set plugin to FAILED and return immediately - plugin.setFinishedDate(null); - plugin.setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - plugin.setFailMessage(String.format(DETAILED_EXCEPTION_FORMAT, MONITOR_ERROR_PREFIX, - ExceptionUtils.getStackTrace(e))); - return; - } catch (ExternalTaskException | RuntimeException e) { - LOGGER.warn(String - .format("workflowExecutionId: %s, pluginType: %s - ExternalTaskException occurred.", - workflowExecution.getId(), plugin.getPluginType()), e); - - consecutiveCancelOrMonitorFailures++; - LOGGER.warn(String.format( - "workflowExecutionId: %s, pluginType: %s - Monitoring of external task failed %s " - + "consecutive times. After exceeding %s retries, pending status will be set", - workflowExecution.getId(), plugin.getPluginType(), consecutiveCancelOrMonitorFailures, - MAX_CANCEL_OR_MONITOR_FAILURES), e); - if (consecutiveCancelOrMonitorFailures >= MAX_CANCEL_OR_MONITOR_FAILURES) { - plugin.setPluginStatusAndResetFailMessage(PluginStatus.PENDING); - } - } finally { - Date updatedDate = new Date(); - plugin.setUpdatedDate(updatedDate); - workflowExecution.setUpdatedDate(updatedDate); - workflowExecutionDao.updateMonitorInformation(workflowExecution); - } - } while (isContinueMonitor(monitorResult)); - - // Perform post-processing if needed. 
- if (!applyPostProcessing(monitorResult, plugin, datasetId)) { - return; - } - - // Set the status of the task. - preparePluginStateAndFinishedDate(plugin, monitorResult); - } - - private boolean isIndexingInPostProcessing(MonitorResult monitor, - AbstractExecutablePlugin plugin) { - return monitor.getTaskState() == TaskState.IN_POST_PROCESSING && - (plugin.getPluginType() == PluginType.REINDEX_TO_PREVIEW || - plugin.getPluginType() == PluginType.REINDEX_TO_PUBLISH); - } - - private boolean isHarvestingInPostProcessing(MonitorResult monitor, AbstractExecutablePlugin plugin) { - return monitor.getTaskState() == TaskState.IN_POST_PROCESSING && - (plugin.getPluginType() == PluginType.HTTP_HARVEST || - plugin.getPluginType() == PluginType.OAIPMH_HARVEST); - } - - private void sendExternalCancelCallIfNeeded(AtomicBoolean externalCancelCallSent, - AbstractExecutablePlugin plugin, AtomicInteger previousProcessedRecords, - AtomicLong checkPointDateOfProcessedRecordsPeriodInMillis) throws ExternalTaskException { - if (!externalCancelCallSent.get() && shouldPluginBeCancelled(plugin, previousProcessedRecords, - checkPointDateOfProcessedRecordsPeriodInMillis)) { - // Update workflowExecution first, to retrieve cancelling information from db - workflowExecution = workflowExecutionDao.getById(workflowExecution.getId().toString()); - plugin.cancel(dpsClient, workflowExecution.getCancelledBy()); - externalCancelCallSent.set(true); - } - } - - private boolean applyPostProcessing(MonitorResult monitorResult, AbstractExecutablePlugin plugin, - String datasetId) { - boolean processingAppliedOrNotRequired = true; - if (monitorResult.getTaskState() == TaskState.PROCESSED) { - try { - this.workflowPostProcessor.performPluginPostProcessing(plugin, datasetId); - } catch (DpsException | InvalidIndexPluginException | BadContentException | RuntimeException e) { - processingAppliedOrNotRequired = false; - LOGGER.warn("Problem occurred during Metis post-processing.", e); - 
plugin.setFinishedDate(null); - plugin.setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - plugin.setFailMessage(String.format(DETAILED_EXCEPTION_FORMAT, POSTPROCESS_ERROR_PREFIX, - ExceptionUtils.getStackTrace(e))); - } - } - return processingAppliedOrNotRequired; - } - - private boolean isContinueMonitor(MonitorResult monitorResult) { - return monitorResult == null || (monitorResult.getTaskState() != TaskState.DROPPED - && monitorResult.getTaskState() != TaskState.PROCESSED); - } - - private boolean shouldPluginBeCancelled(AbstractExecutablePlugin plugin, - AtomicInteger previousProcessedRecords, - AtomicLong checkPointDateOfProcessedRecordsPeriodInMillis) { - // A plugin with CLEANING state is NOT cancellable, it will be when the state is updated - final boolean notCleaningAndCancelling = - plugin.getPluginStatus() != PluginStatus.CLEANING && workflowExecutionDao - .isCancelling(workflowExecution.getId()); - // A cleaning or a pending task should not be cancelled by exceeding the minute cap - final boolean notCleaningOrPending = plugin.getPluginStatus() != PluginStatus.CLEANING - && plugin.getPluginStatus() != PluginStatus.PENDING; - final boolean isMinuteCapExceeded = isMinuteCapOverWithoutChangeInProcessedRecords(plugin, - previousProcessedRecords, checkPointDateOfProcessedRecordsPeriodInMillis); - return (notCleaningAndCancelling || (notCleaningOrPending && isMinuteCapExceeded)); - } - - private boolean isMinuteCapOverWithoutChangeInProcessedRecords(AbstractExecutablePlugin plugin, - AtomicInteger previousProcessedRecords, - AtomicLong checkPointDateOfProcessedRecordsPeriodInMillis) { - final int processedRecords = plugin.getExecutionProgress().getProcessedRecords(); - //If CLEANING is in progress then just reset the values to be sure and return false - //Or if we have progress - if (plugin.getPluginStatus() == PluginStatus.CLEANING - || plugin.getPluginStatus() == PluginStatus.PENDING - || previousProcessedRecords.get() != processedRecords) { - 
checkPointDateOfProcessedRecordsPeriodInMillis.set(System.currentTimeMillis()); - previousProcessedRecords.set(processedRecords); - return false; - } - - final boolean isMinuteCapOverWithoutChangeInProcessedRecords = TimeUnit.MILLISECONDS.toSeconds( - System.currentTimeMillis() - checkPointDateOfProcessedRecordsPeriodInMillis.get()) - >= periodOfNoProcessedRecordsChangeInSeconds; - if (isMinuteCapOverWithoutChangeInProcessedRecords) { - //Request cancelling of the execution - workflowExecutionDao.setCancellingState(workflowExecution, null); - } - return isMinuteCapOverWithoutChangeInProcessedRecords; - } - - private void preparePluginStateAndFinishedDate(AbstractExecutablePlugin plugin, - MonitorResult monitorResult) { - if (monitorResult.getTaskState() == TaskState.PROCESSED) { - plugin.setFinishedDate(new Date()); - plugin.setPluginStatusAndResetFailMessage(PluginStatus.FINISHED); - } else if (monitorResult.getTaskState() == TaskState.DROPPED && !workflowExecutionDao - .isCancelling(workflowExecution.getId())) { - plugin.setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - final String failMessage = - StringUtils.isBlank(monitorResult.getTaskInfo()) ? "No further information received." 
- : monitorResult.getTaskInfo(); - plugin.setFailMessage(EXECUTION_ERROR_PREFIX + failMessage); - } - workflowExecutionDao.updateWorkflowPlugins(workflowExecution); - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutorManager.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutorManager.java deleted file mode 100644 index 19286d2c6d..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowExecutorManager.java +++ /dev/null @@ -1,137 +0,0 @@ -package eu.europeana.metis.core.execution; - -import com.rabbitmq.client.AMQP.BasicProperties; -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.MessageProperties; -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import eu.europeana.metis.core.workflow.plugins.ThrottlingValues; -import org.redisson.api.RedissonClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Manager class for adding executions in the distributed queue. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-30 - */ -public class WorkflowExecutorManager extends PersistenceProvider implements - WorkflowExecutionSettings { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutorManager.class); - private static final int DEFAULT_MONITOR_CHECK_INTERVAL_IN_SECS = 5; - private static final int DEFAULT_PERIOD_OF_NO_PROCESSED_RECORDS_CHANGE_IN_MINUTES = 30; - - private int dpsMonitorCheckIntervalInSecs = DEFAULT_MONITOR_CHECK_INTERVAL_IN_SECS; //Use setter otherwise default - private int periodOfNoProcessedRecordsChangeInMinutes = DEFAULT_PERIOD_OF_NO_PROCESSED_RECORDS_CHANGE_IN_MINUTES; //Use setter otherwise default - - private String rabbitmqQueueName; //Initialize with setter - private String ecloudBaseUrl; //Initialize with setter - private String ecloudProvider; //Initialize with setter - private String metisCoreBaseUrl; //Initialize with setter - private ThrottlingValues throttlingValues; //Initialize with setter - - /** - * Autowired constructor. 
- * - * @param semaphoresPerPluginManager the semaphores per plugin manager - * @param workflowExecutionDao the DAO for accessing WorkflowExecutions - * @param workflowPostProcessor the workflow post processor - * @param rabbitmqPublisherChannel the channel for publishing to RabbitMQ - * @param rabbitmqConsumerChannel the channel for consuming from RabbitMQ - * @param redissonClient the redisson client for distributed locks - * @param dpsClient the Data Processing Service client from ECloud - */ - public WorkflowExecutorManager(SemaphoresPerPluginManager semaphoresPerPluginManager, - WorkflowExecutionDao workflowExecutionDao, WorkflowPostProcessor workflowPostProcessor, - Channel rabbitmqPublisherChannel, Channel rabbitmqConsumerChannel, - RedissonClient redissonClient, DpsClient dpsClient) { - super(rabbitmqPublisherChannel, rabbitmqConsumerChannel, semaphoresPerPluginManager, - workflowExecutionDao, workflowPostProcessor, redissonClient, dpsClient); - } - - /** - * Adds a WorkflowExecution identifier in the distributed queue. 
- * - * @param userWorkflowExecutionObjectId the WorkflowExecution identifier - * @param priority the priority of the WorkflowExecution in the queue - */ - public void addWorkflowExecutionToQueue(String userWorkflowExecutionObjectId, int priority) { - synchronized (getRabbitmqPublisherChannel()) { - //Based on Rabbitmq the basicPublish between threads should be controlled(synchronized) - BasicProperties basicProperties = MessageProperties.PERSISTENT_TEXT_PLAIN.builder() - .priority(priority).build(); - try { - //First parameter is the ExchangeName which is not used - getRabbitmqPublisherChannel().basicPublish("", rabbitmqQueueName, basicProperties, - userWorkflowExecutionObjectId.getBytes(StandardCharsets.UTF_8)); - } catch (IOException e) { - LOGGER.error("WorkflowExecution with objectId: {} not added in queue..", - userWorkflowExecutionObjectId, e); - } - } - } - - public void setRabbitmqQueueName(String rabbitmqQueueName) { - this.rabbitmqQueueName = rabbitmqQueueName; - } - - public void setEcloudBaseUrl(String ecloudBaseUrl) { - this.ecloudBaseUrl = ecloudBaseUrl; - } - - public void setEcloudProvider(String ecloudProvider) { - this.ecloudProvider = ecloudProvider; - } - - public void setMetisCoreBaseUrl(String metisCoreBaseUrl) { - this.metisCoreBaseUrl = metisCoreBaseUrl; - } - - public void setThrottlingValues(ThrottlingValues throttlingValues){ - this.throttlingValues = throttlingValues; - } - - public void setDpsMonitorCheckIntervalInSecs(int dpsMonitorCheckIntervalInSecs) { - this.dpsMonitorCheckIntervalInSecs = dpsMonitorCheckIntervalInSecs; - } - - public void setPeriodOfNoProcessedRecordsChangeInMinutes( - int periodOfNoProcessedRecordsChangeInMinutes) { - this.periodOfNoProcessedRecordsChangeInMinutes = periodOfNoProcessedRecordsChangeInMinutes; - } - - @Override - public int getDpsMonitorCheckIntervalInSecs() { - return dpsMonitorCheckIntervalInSecs; - } - - @Override - public int getPeriodOfNoProcessedRecordsChangeInMinutes() { - return 
periodOfNoProcessedRecordsChangeInMinutes; - } - - @Override - public String getEcloudBaseUrl() { - return ecloudBaseUrl; - } - - @Override - public String getEcloudProvider() { - return ecloudProvider; - } - - @Override - public String getMetisCoreBaseUrl() { - return metisCoreBaseUrl; - } - - @Override - public ThrottlingValues getThrottlingValues() { - return throttlingValues; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowPostProcessor.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowPostProcessor.java deleted file mode 100644 index cc21ceaa3a..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/execution/WorkflowPostProcessor.java +++ /dev/null @@ -1,241 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static eu.europeana.metis.network.ExternalRequestUtil.retryableExternalRequestForNetworkExceptionsThrowing; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.dps.RecordState; -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.service.dps.exception.DpsException; -import eu.europeana.cloud.service.dps.metis.indexing.TargetIndexingDatabase; -import eu.europeana.metis.core.common.RecordIdUtils; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.Dataset.PublicationFitness; -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.exceptions.InvalidIndexPluginException; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import 
eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.DepublishPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.util.CollectionUtils; - -/** - * This object can perform post-processing for workflows. - */ -public class WorkflowPostProcessor { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowPostProcessor.class); - - private static final int ECLOUD_REQUEST_BATCH_SIZE = 1000; - - private final DepublishRecordIdDao depublishRecordIdDao; - private final DatasetDao datasetDao; - private final WorkflowExecutionDao workflowExecutionDao; - private final DpsClient dpsClient; - - /** - * Constructor. 
- * - * @param depublishRecordIdDao The DAO for de-published records - * @param datasetDao The DAO for datasets - * @param workflowExecutionDao The DAO for workflow executions - * @param dpsClient the dps client - */ - public WorkflowPostProcessor(DepublishRecordIdDao depublishRecordIdDao, - DatasetDao datasetDao, WorkflowExecutionDao workflowExecutionDao, DpsClient dpsClient) { - this.depublishRecordIdDao = depublishRecordIdDao; - this.datasetDao = datasetDao; - this.workflowExecutionDao = workflowExecutionDao; - this.dpsClient = dpsClient; - } - - /** - * Performs post-processing for indexing plugins - * - * @param indexPlugin The index plugin - * @param datasetId The dataset id - * @throws DpsException If communication with e-cloud dps failed - * @throws InvalidIndexPluginException If invalid type of plugin - * @throws BadContentException In case the records would violate the maximum number of de-published records that each dataset - * can have. - */ - private void indexPostProcess(AbstractExecutablePlugin indexPlugin, String datasetId) - throws DpsException, InvalidIndexPluginException, BadContentException { - TargetIndexingDatabase targetIndexingDatabase; - switch (indexPlugin) { - case IndexToPreviewPlugin indexToPreviewPlugin -> targetIndexingDatabase = indexToPreviewPlugin.getTargetIndexingDatabase(); - case IndexToPublishPlugin indexToPublishPlugin -> { - targetIndexingDatabase = indexToPublishPlugin.getTargetIndexingDatabase(); - reinstateDepublishRecordIdsStatus((IndexToPublishPlugin) indexPlugin, datasetId); - } - default -> throw new InvalidIndexPluginException("Plugin is not of the types supported"); - } - final Integer databaseTotalRecords = retryableExternalRequestForNetworkExceptionsThrowing(() -> - (int) dpsClient.getTotalMetisDatabaseRecords(datasetId, targetIndexingDatabase)); - indexPlugin.getExecutionProgress().setTotalDatabaseRecords(databaseTotalRecords); - } - - private void reinstateDepublishRecordIdsStatus(IndexToPublishPlugin 
indexPlugin, String datasetId) - throws BadContentException, DpsException { - final boolean isIncremental = indexPlugin.getPluginMetadata().isIncrementalIndexing(); - if (isIncremental) { - // get all currently de-published records IDs from the database and create their full versions - final Set depublishedRecordIds = depublishRecordIdDao.getAllDepublishRecordIdsWithStatus( - datasetId, DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.DEPUBLISHED); - final Map depublishedRecordIdsByFullId = depublishedRecordIds.stream() - .collect(Collectors.toMap( - id -> RecordIdUtils.composeFullRecordId( - datasetId, id), - Function.identity())); - - // Check which have been published by the index action - use full record IDs for eCloud. - if (!CollectionUtils.isEmpty(depublishedRecordIdsByFullId)) { - final List publishedRecordIds = dpsClient.searchPublishedDatasetRecords(datasetId, - new ArrayList<>(depublishedRecordIdsByFullId.keySet())); - - // Remove the 'depublished' status. Note: we need to check for an empty result (otherwise - // the DAO call will update all records). Use the simple record IDs again. 
- if (!CollectionUtils.isEmpty(publishedRecordIds)) { - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, - publishedRecordIds.stream().map(depublishedRecordIdsByFullId::get) - .collect(Collectors.toSet()), DepublicationStatus.PENDING_DEPUBLICATION, null); - } - } - } else { - // reset de-publish status, pass null, all records will be de-published - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, null, - DepublicationStatus.PENDING_DEPUBLICATION, null); - } - } - - /** - * Performs post-processing for de-publish plugins - * - * @param depublishPlugin The de-publish plugin - * @param datasetId The dataset id - * @throws DpsException If communication with e-cloud dps failed - */ - private void depublishPostProcess(DepublishPlugin depublishPlugin, String datasetId) - throws DpsException { - if (depublishPlugin.getPluginMetadata().isDatasetDepublish()) { - depublishDatasetPostProcess(datasetId); - } else { - depublishRecordPostProcess(depublishPlugin, datasetId); - } - } - - /** - * @param depublishPlugin The de-publish plugin - * @param datasetId The dataset id - * @throws DpsException If communication with e-cloud dps failed - */ - private void depublishRecordPostProcess(DepublishPlugin depublishPlugin, String datasetId) - throws DpsException { - - // Retrieve the successfully depublished records. - final long externalTaskId = Long.parseLong(depublishPlugin.getExternalTaskId()); - final List subTasks = new ArrayList<>(); - List subTasksBatch; - do { - subTasksBatch = retryableExternalRequestForNetworkExceptionsThrowing( - () -> dpsClient.getDetailedTaskReportBetweenChunks( - depublishPlugin.getTopologyName(), externalTaskId, subTasks.size(), - subTasks.size() + ECLOUD_REQUEST_BATCH_SIZE)); - subTasks.addAll(subTasksBatch); - } while (subTasksBatch.size() == ECLOUD_REQUEST_BATCH_SIZE); - - // Mark the records as DEPUBLISHED. 
- final Map> successfulRecords = subTasks.stream() - .filter(subTask -> subTask.getRecordState() == RecordState.SUCCESS) - .map(SubTaskInfo::getResource) - .map(RecordIdUtils::decomposeFullRecordId) - .collect(Collectors.groupingBy(Pair::getLeft, - Collectors.mapping(Pair::getRight, Collectors.toSet()))); - successfulRecords.forEach((dataset, records) -> - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(dataset, records, - DepublicationStatus.DEPUBLISHED, new Date())); - - // Set publication fitness to PARTIALLY FIT (if not set to the more severe UNFIT). - final Dataset dataset = datasetDao.getDatasetByDatasetId(datasetId); - if (dataset.getPublicationFitness() != PublicationFitness.UNFIT) { - dataset.setPublicationFitness(PublicationFitness.PARTIALLY_FIT); - datasetDao.update(dataset); - } - } - - /** - * @param datasetId The dataset id - */ - private void depublishDatasetPostProcess(String datasetId) { - - // Set all depublished records back to PENDING. - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, null, - DepublicationStatus.PENDING_DEPUBLICATION, null); - // Find latest PUBLISH Type Plugin and set dataStatus to DELETED. - final PluginWithExecutionId latestSuccessfulPlugin = workflowExecutionDao - .getLatestSuccessfulPlugin(datasetId, OrchestratorService.PUBLISH_TYPES); - if (Objects.nonNull(latestSuccessfulPlugin) && Objects - .nonNull(latestSuccessfulPlugin.getPlugin())) { - final WorkflowExecution workflowExecutionToUpdate = workflowExecutionDao - .getById(latestSuccessfulPlugin.getExecutionId()); - final Optional metisPluginWithType = workflowExecutionToUpdate - .getMetisPluginWithType(latestSuccessfulPlugin.getPlugin().getPluginType()); - if (metisPluginWithType.isPresent()) { - metisPluginWithType.get().setDataStatus(DataStatus.DELETED); - workflowExecutionDao.updateWorkflowPlugins(workflowExecutionToUpdate); - } - } - // Set publication fitness to UNFIT. 
- final Dataset dataset = datasetDao.getDatasetByDatasetId(datasetId); - dataset.setPublicationFitness(PublicationFitness.UNFIT); - datasetDao.update(dataset); - } - - /** - * This method performs post-processing after an individual workflow step. - * - * @param plugin The plugin that was successfully executed - * @param datasetId The dataset ID to which the plugin belongs - * @throws DpsException If communication with e-cloud dps failed - * @throws InvalidIndexPluginException If invalid type of plugin - * @throws BadContentException In case the records would violate the maximum number of de-published records that each dataset - * can have. - */ - void performPluginPostProcessing(AbstractExecutablePlugin plugin, String datasetId) - throws DpsException, InvalidIndexPluginException, BadContentException { - - final PluginType pluginType = plugin.getPluginType(); - LOGGER.info("Starting postprocessing of plugin {} in dataset {}.", pluginType, datasetId); - if (pluginType == PluginType.PREVIEW || pluginType == PluginType.PUBLISH) { - indexPostProcess(plugin, datasetId); - } else if (pluginType == PluginType.DEPUBLISH) { - depublishPostProcess((DepublishPlugin) plugin, datasetId); - } - LOGGER.info("Finished postprocessing of plugin {} in dataset {}.", pluginType, datasetId); - } -} \ No newline at end of file diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProvider.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProvider.java deleted file mode 100644 index b79df1c317..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProvider.java +++ /dev/null @@ -1,14 +0,0 @@ -package eu.europeana.metis.core.mongo; - -import dev.morphia.Datastore; - -/** - * This interface represents an object that can make available a Morphia connection. 
- */ -public interface MorphiaDatastoreProvider { - - /** - * @return the {@link Datastore} connection to Mongo - */ - Datastore getDatastore(); -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProviderImpl.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProviderImpl.java deleted file mode 100644 index 3ac2d74457..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/mongo/MorphiaDatastoreProviderImpl.java +++ /dev/null @@ -1,203 +0,0 @@ -package eu.europeana.metis.core.mongo; - -import com.mongodb.client.MongoClient; -import dev.morphia.Datastore; -import dev.morphia.Morphia; -import dev.morphia.mapping.DiscriminatorFunction; -import dev.morphia.mapping.Mapper; -import dev.morphia.mapping.MapperOptions; -import dev.morphia.mapping.NamingStrategy; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetIdSequence; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.dataset.DepublishRecordId; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractIndexPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.DepublishPlugin; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPlugin; -import 
eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPlugin; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPlugin; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.NormalizationPlugin; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ReindexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPlugin; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPlugin; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPlugin; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import 
java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -/** - * Class to initialize the mongo collections and the {@link Datastore} connection. It also performs - * data initialization tasks if needed. - */ -public class MorphiaDatastoreProviderImpl implements MorphiaDatastoreProvider { - - private static final Logger LOGGER = LoggerFactory.getLogger(MorphiaDatastoreProviderImpl.class); - private Datastore datastore; - - /** - * Constructor to initialize the mongo mappings/collections and the {@link Datastore} connection. - * This also initializes the {@link DatasetIdSequence} that this database uses. This constructor - * is meant to be used when the database is already available. - * - * @param mongoClient {@link MongoClient} - * @param databaseName the database name - */ - public MorphiaDatastoreProviderImpl(MongoClient mongoClient, String databaseName) { - this(mongoClient, databaseName, false); - } - - /** - * Constructor to initialize the mongo mappings/collections and the {@link Datastore} connection. - * This also initializes the {@link DatasetIdSequence} that this database uses. This constructor - * is meant to be used mostly for when the creation of the database is required. - * - * @param mongoClient {@link MongoClient} - * @param databaseName the database name - * @param createIndexes flag that initiates the database/indices - */ - public MorphiaDatastoreProviderImpl(MongoClient mongoClient, String databaseName, - boolean createIndexes) { - createDatastore(mongoClient, databaseName); - if (createIndexes) { - LOGGER.info("Initializing database indices"); - datastore.ensureIndexes(); - } - } - - private void createDatastore(MongoClient mongoClient, String databaseName) { - // Register the mappings and set up the data store. 
- // TODO: 8/28/20 The mapper options should eventually be removed but requires an update of the affected fields on all documents in the database - final MapperOptions mapperOptions = MapperOptions.builder().discriminatorKey("className") - .discriminator(DiscriminatorFunction.className()) - .collectionNaming(NamingStrategy.identity()).build(); - datastore = Morphia.createDatastore(mongoClient, databaseName, mapperOptions); - final Mapper mapper = datastore.getMapper(); - mapper.getEntityModel(Dataset.class); - mapper.getEntityModel(DatasetIdSequence.class); - mapper.getEntityModel(Workflow.class); - mapper.getEntityModel(WorkflowExecution.class); - mapper.getEntityModel(ScheduledWorkflow.class); - mapper.getEntityModel(DatasetXslt.class); - mapper.getEntityModel(DepublishRecordId.class); - // Plugins - mapper.getEntityModel(AbstractMetisPlugin.class); - mapper.getEntityModel(AbstractExecutablePlugin.class); - mapper.getEntityModel(DepublishPlugin.class); - mapper.getEntityModel(EnrichmentPlugin.class); - mapper.getEntityModel(HTTPHarvestPlugin.class); - mapper.getEntityModel(IndexToPreviewPlugin.class); - mapper.getEntityModel(IndexToPublishPlugin.class); - mapper.getEntityModel(LinkCheckingPlugin.class); - mapper.getEntityModel(MediaProcessPlugin.class); - mapper.getEntityModel(NormalizationPlugin.class); - mapper.getEntityModel(OaipmhHarvestPlugin.class); - mapper.getEntityModel(ReindexToPreviewPlugin.class); - mapper.getEntityModel(ReindexToPublishPlugin.class); - mapper.getEntityModel(TransformationPlugin.class); - mapper.getEntityModel(ValidationExternalPlugin.class); - mapper.getEntityModel(ValidationInternalPlugin.class); - - // Plugins metadata - mapper.getEntityModel(AbstractMetisPluginMetadata.class); - mapper.getEntityModel(AbstractHarvestPluginMetadata.class); - mapper.getEntityModel(AbstractIndexPluginMetadata.class); - mapper.getEntityModel(DepublishPluginMetadata.class); - mapper.getEntityModel(EnrichmentPluginMetadata.class); - 
mapper.getEntityModel(HTTPHarvestPluginMetadata.class); - mapper.getEntityModel(IndexToPreviewPluginMetadata.class); - mapper.getEntityModel(IndexToPublishPluginMetadata.class); - mapper.getEntityModel(LinkCheckingPluginMetadata.class); - mapper.getEntityModel(MediaProcessPluginMetadata.class); - mapper.getEntityModel(NormalizationPluginMetadata.class); - mapper.getEntityModel(OaipmhHarvestPluginMetadata.class); - mapper.getEntityModel(ReindexToPreviewPluginMetadata.class); - mapper.getEntityModel(ReindexToPublishPluginMetadata.class); - mapper.getEntityModel(TransformationPluginMetadata.class); - mapper.getEntityModel(ValidationExternalPluginMetadata.class); - mapper.getEntityModel(ValidationInternalPluginMetadata.class); - - // Initialize the DatasetIdSequence if required. - if (datastore.find(DatasetIdSequence.class).count() == 0) { - datastore.save(new DatasetIdSequence(0)); - } - LOGGER.info("Datastore initialized"); - } - - /** - * Constructor. In addition to the functionality of {@link #MorphiaDatastoreProviderImpl(MongoClient, - * String)}, it also sets a default non-dataset specific {@link DatasetXslt} if none is present. - * - * @param mongoClient {@link MongoClient} - * @param databaseName the database name - * @param defaultTransformationSupplier The default non-dataset specific {@link DatasetXslt} to - * set if none is available. - * @throws IOException In case the default transformation could not be loaded. - */ - public MorphiaDatastoreProviderImpl(MongoClient mongoClient, String databaseName, - InputStreamProvider defaultTransformationSupplier) throws IOException { - - // Initialize this class. - this(mongoClient, databaseName); - - // Initialize the default DatasetXslt if needed. 
- final DatasetXsltDao datasetXsltDao = new DatasetXsltDao(this); - if (datasetXsltDao.getLatestDefaultXslt() == null) { - try (final InputStream inputStream = defaultTransformationSupplier.get()) { - final String defaultTransformationAsString = IOUtils - .toString(inputStream, StandardCharsets.UTF_8.name()); - final DatasetXslt defaultTransformation = new DatasetXslt(DatasetXslt.DEFAULT_DATASET_ID, - defaultTransformationAsString); - datasetXsltDao.create(defaultTransformation); - } - } - } - - @Override - public Datastore getDatastore() { - return datastore; - } - - /** - * An interface similar to {@link java.util.function.Supplier}, but specifically for instances of - * {@link InputStream} and that allows the throwing of an {@link IOException}. - */ - public interface InputStreamProvider { - - /** - * @return The input stream. - * @throws IOException In case the stream could not be opened. - */ - InputStream get() throws IOException; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/Authorizer.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/Authorizer.java deleted file mode 100644 index 327a4d10ac..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/Authorizer.java +++ /dev/null @@ -1,156 +0,0 @@ -package eu.europeana.metis.core.service; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.exception.UserUnauthorizedException; - -/** - * This class takes care of all authorization checks for the services. 
- * - * @author jochen - * - */ -@Service -public class Authorizer { - - private final DatasetDao datasetDao; - - /** - * Constructor. - * - * @param datasetDao The dataset DAO. - */ - @Autowired - public Authorizer(DatasetDao datasetDao) { - this.datasetDao = datasetDao; - } - - /** - * Authorizes writing access to the default XSLT. Will return quietly if authorization succeeds. - * - * @param metisUserView The user wishing to gain access. - * @throws UserUnauthorizedException In case the user is not authorized. - */ - void authorizeWriteDefaultXslt(MetisUserView metisUserView) throws UserUnauthorizedException { - if (metisUserView == null || metisUserView.getAccountRole() != AccountRole.METIS_ADMIN) { - throw new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED); - } - } - - /** - * Authorizes reading access to all datasets. Will return quietly if authorization succeeds. - * - * @param metisUserView The user wishing to gain access. - * @throws UserUnauthorizedException In case the user is not authorized. - */ - void authorizeReadAllDatasets(MetisUserView metisUserView) throws UserUnauthorizedException { - if (metisUserView == null || (metisUserView.getAccountRole() != AccountRole.METIS_ADMIN - && metisUserView.getAccountRole() != AccountRole.EUROPEANA_DATA_OFFICER)) { - throw new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED); - } - } - - /** - * Authorizes reading access to an existing dataset. Will return quietly if authorization - * succeeds. - * - * @param metisUserView The user wishing to gain access. - * @param datasetId The ID of the dataset to which the user wishes to gain access. - * @return The dataset in question. - * @throws UserUnauthorizedException In case the user is not authorized. - * @throws NoDatasetFoundException In case the dataset with the given ID could not be found. 
- */ - Dataset authorizeReadExistingDatasetById(MetisUserView metisUserView, String datasetId) - throws UserUnauthorizedException, NoDatasetFoundException { - return authorizeExistingDatasetById(metisUserView, datasetId, true); - } - - /** - * Authorizes writing access to an existing dataset. Will return quietly if authorization - * succeeds. - * - * @param metisUserView The user wishing to gain access. - * @param datasetId The ID of the dataset to which the user wishes to gain access. - * @return The dataset in question. - * @throws UserUnauthorizedException In case the user is not authorized. - * @throws NoDatasetFoundException In case the dataset with the given ID could not be found. - */ - Dataset authorizeWriteExistingDatasetById(MetisUserView metisUserView, String datasetId) - throws UserUnauthorizedException, NoDatasetFoundException { - return authorizeExistingDatasetById(metisUserView, datasetId, false); - } - - private Dataset authorizeExistingDatasetById(MetisUserView metisUserView, String datasetId, - boolean allowView) throws UserUnauthorizedException, NoDatasetFoundException { - return authorizeExistingDataset(metisUserView, allowView, () -> { - final Dataset dataset = datasetDao.getDatasetByDatasetId(datasetId); - if (dataset == null) { - throw new NoDatasetFoundException( - String.format("No dataset found with datasetId: '%s' in METIS", datasetId)); - } - return dataset; - }); - } - - /** - * Authorizes reading access to an existing dataset. Will return quietly if authorization - * succeeds. - * - * @param metisUserView The user wishing to gain access. - * @param datasetName The name of the dataset to which the user wishes to gain access. - * @return The dataset in question. - * @throws UserUnauthorizedException In case the user is not authorized. - * @throws NoDatasetFoundException In case the dataset with the given name could not be found. 
- */ - Dataset authorizeReadExistingDatasetByName(MetisUserView metisUserView, String datasetName) - throws UserUnauthorizedException, NoDatasetFoundException { - return authorizeExistingDataset(metisUserView, true, () -> { - final Dataset dataset = datasetDao.getDatasetByDatasetName(datasetName); - if (dataset == null) { - throw new NoDatasetFoundException( - String.format("No dataset found with datasetName: '%s' in METIS", datasetName)); - } - return dataset; - }); - } - - private Dataset authorizeExistingDataset(MetisUserView metisUserView, boolean allowView, - DatasetSupplier datasetSupplier) throws UserUnauthorizedException, NoDatasetFoundException { - checkUserRoleForIndividualDatasetManagement(metisUserView, allowView); - final Dataset dataset = datasetSupplier.get(); - if (metisUserView.getAccountRole() != AccountRole.METIS_ADMIN - && !metisUserView.getOrganizationId().equals(dataset.getOrganizationId())) { - throw new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED); - } - return dataset; - } - - /** - * Authorizes the creation of a new dataset. Will return quietly if authorization succeeds. - * - * @param metisUserView The user wishing to gain access. - * @throws UserUnauthorizedException In case the user is not authorized. 
- */ - void authorizeWriteNewDataset(MetisUserView metisUserView) throws UserUnauthorizedException { - checkUserRoleForIndividualDatasetManagement(metisUserView, false); - } - - private void checkUserRoleForIndividualDatasetManagement(MetisUserView metisUserView, boolean allowView) - throws UserUnauthorizedException { - if (metisUserView == null || metisUserView.getAccountRole() == null - || (!allowView && metisUserView.getAccountRole() == AccountRole.PROVIDER_VIEWER)) { - throw new UserUnauthorizedException(CommonStringValues.UNAUTHORIZED); - } - } - - @FunctionalInterface - private interface DatasetSupplier { - Dataset get() throws NoDatasetFoundException; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DatasetService.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DatasetService.java deleted file mode 100644 index 8f41780c64..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DatasetService.java +++ /dev/null @@ -1,689 +0,0 @@ -package eu.europeana.metis.core.service; - -import static java.util.function.Predicate.not; - -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.TransformationParameters; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetSearchView; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.exceptions.DatasetAlreadyExistsException; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoXsltFoundException; -import 
eu.europeana.metis.core.exceptions.XsltSetupException; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.TransformationPlugin; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.transformation.service.EuropeanaGeneratedIdsMap; -import eu.europeana.metis.transformation.service.EuropeanaIdCreator; -import eu.europeana.metis.transformation.service.EuropeanaIdException; -import eu.europeana.metis.transformation.service.TransformationException; -import eu.europeana.metis.transformation.service.XsltTransformer; -import eu.europeana.metis.utils.CommonStringValues; -import eu.europeana.metis.utils.RestEndpoints; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import java.util.List; -import java.util.UUID; -import org.apache.commons.lang3.StringUtils; -import org.bson.types.ObjectId; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * Contains business logic of how to manipulate datasets in the system using several components. The functionality in this class - * is checked for user authentication. 
- */ -@Service -public class DatasetService { - - private static final Logger LOGGER = LoggerFactory.getLogger(DatasetService.class); - private static final String DATASET_CREATION_LOCK = "datasetCreationLock"; - private static final int MINIMUM_WORD_LENGTH = 3; - - private final Authorizer authorizer; - private final DatasetDao datasetDao; - private final DatasetXsltDao datasetXsltDao; - private final WorkflowDao workflowDao; - private final WorkflowExecutionDao workflowExecutionDao; - private final ScheduledWorkflowDao scheduledWorkflowDao; - private final RedissonClient redissonClient; - private String metisCoreUrl; //Initialize with setter - - /** - * Constructs the service. - * - * @param datasetDao the Dao instance to access the Dataset database - * @param datasetXsltDao the Dao instance to access the DatasetXslt database - * @param workflowDao the Dao instance to access the Workflow database - * @param workflowExecutionDao the Dao instance to access the WorkflowExecution database - * @param scheduledWorkflowDao the Dao instance to access the ScheduledWorkflow database - * @param redissonClient the redisson client used for distributed locks - * @param authorizer the authorizer for this service - */ - @Autowired - public DatasetService(DatasetDao datasetDao, DatasetXsltDao datasetXsltDao, - WorkflowDao workflowDao, WorkflowExecutionDao workflowExecutionDao, - ScheduledWorkflowDao scheduledWorkflowDao, RedissonClient redissonClient, - Authorizer authorizer) { - this.datasetDao = datasetDao; - this.datasetXsltDao = datasetXsltDao; - this.workflowDao = workflowDao; - this.workflowExecutionDao = workflowExecutionDao; - this.scheduledWorkflowDao = scheduledWorkflowDao; - this.redissonClient = redissonClient; - this.authorizer = authorizer; - } - - /** - * Creates a dataset for a specific {@link MetisUserView} - * - * @param metisUserView the user used to create the dataset - * @param dataset the dataset to be created - * @return the created {@link Dataset} 
including the extra fields generated from the system - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link DatasetAlreadyExistsException} if the dataset for the same organizationId and datasetName already exists in the system.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some contents were invalid
  • - *
- */ - public Dataset createDataset(MetisUserView metisUserView, Dataset dataset) - throws GenericMetisException { - authorizer.authorizeWriteNewDataset(metisUserView); - - dataset.setOrganizationId(metisUserView.getOrganizationId()); - dataset.setOrganizationName(metisUserView.getOrganizationName()); - - //Lock required for find in the next empty datasetId - RLock lock = redissonClient.getFairLock(DATASET_CREATION_LOCK); - lock.lock(); - - Dataset datasetObjectId; - try { - Dataset storedDataset = datasetDao - .getDatasetByOrganizationIdAndDatasetName(dataset.getOrganizationId(), - dataset.getDatasetName()); - if (storedDataset != null) { - lock.unlock(); - throw new DatasetAlreadyExistsException(String - .format("Dataset with organizationId: %s and datasetName: %s already exists..", - dataset.getOrganizationId(), dataset.getDatasetName())); - } - - dataset.setCreatedByUserId(metisUserView.getUserId()); - dataset.setId(null); - dataset.setUpdatedDate(null); - - dataset.setCreatedDate(new Date()); - //Add fake ecloudDatasetId to avoid null errors in the database - dataset.setEcloudDatasetId(String.format("NOT_CREATED_YET-%s", UUID.randomUUID())); - - int nextInSequenceDatasetId = datasetDao.findNextInSequenceDatasetId(); - dataset.setDatasetId(Integer.toString(nextInSequenceDatasetId)); - verifyReferencesToOldDatasetIds(dataset); - datasetObjectId = datasetDao.create(dataset); - } finally { - lock.unlock(); - } - return datasetObjectId; - } - - /** - * Update an already existent dataset. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param dataset the provided dataset with the changes and the datasetId included in the {@link Dataset} - * @param xsltString the text of the String representation - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link BadContentException} if the dataset has an execution running, contents are invalid.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
  • {@link DatasetAlreadyExistsException} if the request contains a datasetName change and that datasetName already exists for organizationId of metisUser.
  • - *
- */ - public void updateDataset(MetisUserView metisUserView, Dataset dataset, String xsltString) - throws GenericMetisException { - - // Find existing dataset and check authentication. - Dataset storedDataset = authorizer - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - - // Check that the new dataset name does not already exist. - final String newDatasetName = dataset.getDatasetName(); - if (!storedDataset.getDatasetName().equals(newDatasetName) - && datasetDao.getDatasetByOrganizationIdAndDatasetName(metisUserView.getOrganizationId(), - newDatasetName) != null) { - throw new DatasetAlreadyExistsException(String.format( - "Trying to change dataset with datasetName: %s but dataset with organizationId: %s and datasetName: %s already exists", - storedDataset.getDatasetName(), metisUserView.getOrganizationId(), newDatasetName)); - } - - // Check that there is no workflow execution pending for the given dataset. - if (workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId()) != null) { - throw new BadContentException( - String.format("Workflow execution is active for datasetId %s", dataset.getDatasetId())); - } - - // Set/overwrite dataset properties that the user may not determine. 
- dataset.setOrganizationId(metisUserView.getOrganizationId()); - dataset.setOrganizationName(metisUserView.getOrganizationName()); - dataset.setCreatedByUserId(storedDataset.getCreatedByUserId()); - dataset.setEcloudDatasetId(storedDataset.getEcloudDatasetId()); - dataset.setCreatedDate(storedDataset.getCreatedDate()); - dataset.setOrganizationId(storedDataset.getOrganizationId()); - dataset.setOrganizationName(storedDataset.getOrganizationName()); - dataset.setCreatedByUserId(storedDataset.getCreatedByUserId()); - dataset.setId(storedDataset.getId()); - - verifyReferencesToOldDatasetIds(dataset); - - if (xsltString == null) { - dataset.setXsltId(storedDataset.getXsltId()); - } else { - cleanDatasetXslt(storedDataset.getXsltId()); - dataset.setXsltId( - datasetXsltDao.create(new DatasetXslt(dataset.getDatasetId(), xsltString)).getId()); - } - - // Update the dataset - dataset.setUpdatedDate(new Date()); - datasetDao.update(dataset); - } - - private void verifyReferencesToOldDatasetIds(Dataset dataset) throws BadContentException { - if (dataset.getDatasetIdsToRedirectFrom() != null) { - for (String datasetId : dataset.getDatasetIdsToRedirectFrom()) { - if (datasetDao.getDatasetByDatasetId(datasetId) == null) { - throw new BadContentException( - String.format("Old datasetId for redirection %s doesn't exist", datasetId)); - } - if (dataset.getDatasetId().equals(datasetId)) { - throw new BadContentException( - String.format("datasetId for redirection %s cannot be the same as the current datasetId", datasetId)); - } - } - } - } - - private void cleanDatasetXslt(ObjectId xsltId) { - if (xsltId != null) { - //Check if it's referenced - final WorkflowExecution workflowExecution = workflowExecutionDao - .getAnyByXsltId(xsltId.toString()); - if (workflowExecution == null) { - final DatasetXslt datasetXslt = datasetXsltDao - .getById(xsltId.toString()); - if (datasetXslt != null) { - datasetXsltDao.delete(datasetXslt); - } - } - } - } - - /** - * Delete a dataset from the 
system - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetId the identifier to find the dataset with - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if the dataset is has an execution running.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
- */ - public void deleteDatasetByDatasetId(MetisUserView metisUserView, String datasetId) - throws GenericMetisException { - - // Find existing dataset and check authentication. - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - - // Check that there is no workflow execution pending for the given dataset. - if (workflowExecutionDao.existsAndNotCompleted(datasetId) != null) { - throw new BadContentException( - String.format("Workflow execution is active for datasteId %s", datasetId)); - } - - //Are there datasets that have a reference to the datasetId that is to be removed - final List datasetsThatHaveAReference = datasetDao.getAllDatasetsByDatasetIdsToRedirectFrom(datasetId); - //Clear references of the datasetId - datasetsThatHaveAReference.forEach(ds -> { - final List datasetIdsToRedirectFrom = ds.getDatasetIdsToRedirectFrom(); - ds.setDatasetIdsToRedirectFrom(datasetIdsToRedirectFrom.stream().filter(not(id -> id.equals(datasetId))).toList()); - datasetDao.update(ds); - }); - - // Delete the dataset. - datasetDao.deleteByDatasetId(datasetId); - - // Clean up dataset leftovers - datasetXsltDao.deleteAllByDatasetId(datasetId); - workflowDao.deleteWorkflow(datasetId); - workflowExecutionDao.deleteAllByDatasetId(datasetId); - scheduledWorkflowDao.deleteAllByDatasetId(datasetId); - } - - /** - * Get a dataset from the system using a datasetName - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetName the string used to find the dataset with - * @return {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset is not found in the system.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - public Dataset getDatasetByDatasetName(MetisUserView metisUserView, String datasetName) - throws GenericMetisException { - return authorizer.authorizeReadExistingDatasetByName(metisUserView, datasetName); - } - - /** - * Get a dataset from the system using a datasetId. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetId the identifier to find the dataset with - * @return {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - public Dataset getDatasetByDatasetId(MetisUserView metisUserView, String datasetId) - throws GenericMetisException { - return authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - } - - /** - * Get the xslt object containing the escaped xslt string using a dataset identifier. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetId the identifier to find the xslt with - * @return the {@link DatasetXslt} object containing the xslt as an escaped string - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - public DatasetXslt getDatasetXsltByDatasetId(MetisUserView metisUserView, - String datasetId) throws GenericMetisException { - Dataset dataset = authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - DatasetXslt datasetXslt = datasetXsltDao.getById(dataset.getXsltId() == null ? null : dataset.getXsltId().toString()); - if (datasetXslt == null) { - throw new NoXsltFoundException(String.format( - "No datasetXslt found for dataset with datasetId: '%s' and xsltId: '%s' in METIS", - datasetId, dataset.getXsltId())); - } - return datasetXslt; - } - - /** - * Get the xslt object containing the escaped xslt string using an xslt identifier. - *

- * It is a method that does not require authentication. - *

- * - * @param xsltId the identifier to find the xslt with - * @return the {@link DatasetXslt} object containing the xslt as an escaped string - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
- */ - public DatasetXslt getDatasetXsltByXsltId(String xsltId) throws GenericMetisException { - DatasetXslt datasetXslt = datasetXsltDao.getById(xsltId); - if (datasetXslt == null) { - throw new NoXsltFoundException(String.format("No datasetXslt found with xsltId: '%s' in METIS", xsltId)); - } - return datasetXslt; - } - - /** - * Create a new default xslt in the database. - *

- * Each dataset can have it's own custom xslt but a default xslt should always be available. Creating a new default xslt will - * create a new {@link DatasetXslt} object and the older one will still be available. The created {@link DatasetXslt} will have - * {@link DatasetXslt#getDatasetId()} equal to -1 to indicate that it is not related to a specific dataset. - *

- * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param xsltString the text of the String representation non escaped - * @return the created {@link DatasetXslt} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - public DatasetXslt createDefaultXslt(MetisUserView metisUserView, String xsltString) - throws GenericMetisException { - authorizer.authorizeWriteDefaultXslt(metisUserView); - DatasetXslt datasetXslt = null; - if (xsltString != null) { - final DatasetXslt latestDefaultXslt = datasetXsltDao.getLatestDefaultXslt(); - if (latestDefaultXslt != null) { - cleanDatasetXslt(latestDefaultXslt.getId()); - } - datasetXslt = datasetXsltDao.create(new DatasetXslt(xsltString)); - } - return datasetXslt; - } - - /** - * Get the latest default xslt. - *

- * It is an method that does not require authentication and it is meant to be used from external service to download the - * corresponding xslt. At the point of writing, ECloud transformation topology is using it. {@link TransformationPlugin} - *

- * - * @return the text representation of the String xslt non escaped - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoXsltFoundException} if the xslt was not found.
  • - *
- */ - public DatasetXslt getLatestDefaultXslt() throws GenericMetisException { - DatasetXslt datasetXslt = datasetXsltDao.getLatestDefaultXslt(); - if (datasetXslt == null) { - throw new NoXsltFoundException("No default datasetXslt found"); - } - return datasetXslt; - } - - /** - * Transform a list of xmls using the latest default xslt stored. - *

- * This method can be used, for example, after a response from - * {@link ProxiesService#getListOfFileContentsFromPluginExecution(MetisUserView, String, ExecutablePluginType, String, int)} to - * try a transformation on a list of xmls just after validation external to preview an example result. - *

- * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetId the dataset identifier, it is required for authentication and for the dataset fields xslt injection - * @param records the list of {@link Record} for which {@link Record#getXmlRecord()} returns a non-null value - * @return a list of {@link Record}s with {@link Record#getXmlRecord()} returning the transformed XML - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the authorization header is un-parsable or the - * user cannot be authorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link NoXsltFoundException} if there is no xslt found
  • - *
  • {@link XsltSetupException} if the XSL transform could not be set up
  • - *
- */ - public List transformRecordsUsingLatestDefaultXslt(MetisUserView metisUserView, String datasetId, - List records) throws GenericMetisException { - //Used for authentication and dataset existence - Dataset dataset = authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - //Using default dataset identifier - DatasetXslt datasetXslt = datasetXsltDao.getLatestDefaultXslt(); - if (datasetXslt == null) { - throw new NoXsltFoundException("Could not find default xslt"); - } - String xsltUrl; - synchronized (this) { - xsltUrl = metisCoreUrl + - RestEndpoints.resolve(RestEndpoints.DATASETS_XSLT_XSLTID, Collections.singletonList(datasetXslt.getId().toString())); - } - return transformRecords(dataset, records, xsltUrl); - } - - /** - * Transform a list of xmls using the latest dataset xslt stored. - *

- * This method can be used, for example, after a response from - * {@link ProxiesService#getListOfFileContentsFromPluginExecution(MetisUserView, String, ExecutablePluginType, String, int)} to - * try a transformation on a list of xmls just after validation external to preview an example result. - *

- * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param datasetId the dataset identifier, it is required for authentication and for the dataset fields xslt injection - * @param records the list of {@link Record} for which {@link Record#getXmlRecord()} returns a non-null value - * @return a list of {@link Record}s with {@link Record#getXmlRecord()} returning the transformed XML - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the authorization header is un-parsable or the - * user cannot be authorized.
  • - *
  • {@link NoDatasetFoundException} if the dataset was not found.
  • - *
  • {@link NoXsltFoundException} if there is no xslt found
  • - *
  • {@link XsltSetupException} if the XSL transform could not be set up
  • - *
- */ - public List transformRecordsUsingLatestDatasetXslt(MetisUserView metisUserView, String datasetId, - List records) throws GenericMetisException { - //Used for authentication and dataset existence - Dataset dataset = authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - if (dataset.getXsltId() == null) { - throw new NoXsltFoundException( - String.format("Could not find xslt for datasetId %s", datasetId)); - } - DatasetXslt datasetXslt = datasetXsltDao.getById(dataset.getXsltId().toString()); - - String xsltUrl; - synchronized (this) { - xsltUrl = metisCoreUrl + RestEndpoints - .resolve(RestEndpoints.DATASETS_XSLT_XSLTID, Collections.singletonList(datasetXslt.getId().toString())); - } - - return transformRecords(dataset, records, xsltUrl); - } - - private List transformRecords(Dataset dataset, Collection records, String xsltUrl) - throws XsltSetupException { - - // Set up transformer. - final EuropeanaIdCreator europeanIdCreator; - final TransformationParameters transformationParameters = new TransformationParameters(dataset); - try (XsltTransformer transformer = new XsltTransformer(xsltUrl, transformationParameters.getDatasetName(), - transformationParameters.getEdmCountry(), transformationParameters.getEdmLanguage())) { - europeanIdCreator = new EuropeanaIdCreator(); - - // Transform the records. 
- return records.stream().map(record -> { - try { - EuropeanaGeneratedIdsMap europeanaGeneratedIdsMap = europeanIdCreator - .constructEuropeanaId(record.getXmlRecord(), dataset.getDatasetId()); - return new Record(record.getEcloudId(), - transformer.transform(record.getXmlRecord().getBytes(StandardCharsets.UTF_8), europeanaGeneratedIdsMap).toString()); - } catch (TransformationException e) { - LOGGER.info("Record from list failed transformation", e); - return new Record(record.getEcloudId(), e.getMessage()); - } catch (EuropeanaIdException e) { - LOGGER.info(CommonStringValues.EUROPEANA_ID_CREATOR_INITIALIZATION_FAILED, e); - return new Record(record.getEcloudId(), e.getMessage()); - } - }).toList(); - } catch (TransformationException e) { - throw new XsltSetupException("Could not setup XSL transformation.", e); - } catch (EuropeanaIdException e) { - throw new XsltSetupException(CommonStringValues.EUROPEANA_ID_CREATOR_INITIALIZATION_FAILED, - e); - } - } - - /** - * Get all datasets using the provider field. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param provider the provider string used to find the datasets - * @param nextPage the nextPage token or -1 - * @return {@link List} of {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public List getAllDatasetsByProvider( - MetisUserView metisUserView, String provider, int nextPage) - throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - return datasetDao.getAllDatasetsByProvider(provider, nextPage); - } - - /** - * Get all datasets using the intermediateProvider field. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param intermediateProvider the intermediateProvider string used to find the datasets - * @param nextPage the nextPage token or -1 - * @return {@link List} of {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public List getAllDatasetsByIntermediateProvider( - MetisUserView metisUserView, String intermediateProvider, - int nextPage) throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - return datasetDao.getAllDatasetsByIntermediateProvider(intermediateProvider, nextPage); - } - - /** - * Get all datasets using the dataProvider field. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param dataProvider the dataProvider string used to find the datasets - * @param nextPage the nextPage token or -1 - * @return {@link List} of {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public List getAllDatasetsByDataProvider( - MetisUserView metisUserView, String dataProvider, - int nextPage) throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - return datasetDao.getAllDatasetsByDataProvider(dataProvider, nextPage); - } - - /** - * Get all datasets using the organizationId field. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param organizationId the organizationId string used to find the datasets - * @param nextPage the nextPage number or -1 - * @return {@link List} of {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public List getAllDatasetsByOrganizationId( - MetisUserView metisUserView, String organizationId, int nextPage) - throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - return datasetDao.getAllDatasetsByOrganizationId(organizationId, nextPage); - } - - /** - * Get all datasets using the organizationName field. - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param organizationName the organizationName string used to find the datasets - * @param nextPage the nextPage number or -1 - * @return {@link List} of {@link Dataset} - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public List getAllDatasetsByOrganizationName( - MetisUserView metisUserView, String organizationName, int nextPage) - throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - return datasetDao.getAllDatasetsByOrganizationName(organizationName, nextPage); - } - - /** - * Get the list of of matching DatasetSearch using dataset - * - * @param metisUserView the {@link MetisUserView} to authorize with - * @param searchString a string that may contain multiple words separated by spaces. - *

The search will be performed on the fields datasetId, datasetName, provider, dataProvider. - * The words that start with a numeric character will be considered as part of the datasetId search and that field is searched - * as a "starts with" operation. All words that from a certain length threshold and above e.g. 3 will be used, as AND - * operations, for searching the fields datasetName, provider, dataProvider

- * @param nextPage the nextPage number, must be positive - * @return a list with the dataset search view results - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if the parameters provided are invalid.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized.
  • - *
- */ - public List searchDatasetsBasedOnSearchString(MetisUserView metisUserView, - String searchString, - int nextPage) throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - if (StringUtils.isBlank(searchString)) { - throw new BadContentException("Parameter searchString cannot be blank"); - } - final String[] words = searchString.split("\\s+"); - final List datasetIdWords = Arrays.stream(words) - .filter(word -> Character.isDigit(word.charAt(0))).toList(); - - final List minimumLengthWords = Arrays.stream(words) - .filter(word -> word.length() >= MINIMUM_WORD_LENGTH).toList(); - - List datasets = new ArrayList<>(); - if (!datasetIdWords.isEmpty() || !minimumLengthWords.isEmpty()) { - datasets = datasetDao - .searchDatasetsBasedOnSearchString(datasetIdWords, minimumLengthWords, nextPage); - } - - return datasets.stream().map(dataset -> { - final PluginWithExecutionId latestSuccessfulExecutablePlugin = workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(dataset.getDatasetId(), - EnumSet.allOf(ExecutablePluginType.class), false); - final DatasetSearchView datasetSearchView = new DatasetSearchView(); - datasetSearchView.setDatasetId(dataset.getDatasetId()); - datasetSearchView.setDatasetName(dataset.getDatasetName()); - datasetSearchView.setProvider(dataset.getProvider()); - datasetSearchView.setDataProvider(dataset.getDataProvider()); - if (latestSuccessfulExecutablePlugin != null) { - datasetSearchView - .setLastExecutionDate(latestSuccessfulExecutablePlugin.getPlugin().getStartedDate()); - } - return datasetSearchView; - } - ).toList(); - } - - public int getDatasetsPerRequestLimit() { - return datasetDao.getDatasetsPerRequest(); - } - - public void setMetisCoreUrl(String metisCoreUrl) { - synchronized (this) { - this.metisCoreUrl = metisCoreUrl; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DepublishRecordIdService.java 
b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DepublishRecordIdService.java deleted file mode 100644 index 556207738c..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/DepublishRecordIdService.java +++ /dev/null @@ -1,258 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation.PublicationStatus; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.rest.DepublishRecordIdView; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.common.RecordIdUtils; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import org.apache.commons.lang3.StringUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * Service object for all operations concerning depublished records. The functionality in this class - * is checked for user authentication. - */ -@Service -public class DepublishRecordIdService { - - private final Authorizer authorizer; - private final OrchestratorService orchestratorService; - private final DepublishRecordIdDao depublishRecordIdDao; - - /** - * Constructor. 
- * - * @param authorizer The authorizer for checking permissions. - * @param orchestratorService The orchestrator service - * @param depublishRecordIdDao The DAO for depublished records. - */ - @Autowired - public DepublishRecordIdService(Authorizer authorizer, OrchestratorService orchestratorService, - DepublishRecordIdDao depublishRecordIdDao) { - this.authorizer = authorizer; - this.orchestratorService = orchestratorService; - this.depublishRecordIdDao = depublishRecordIdDao; - } - - /** - * Adds a list of record ids to be depublished for the dataset. - * - * @param metisUserView The user performing this operation. - * @param datasetId The ID of the dataset to which the depublished records belong. - * @param recordIdsInSeparateLines The string containing the record IDs in separate lines. - * @return How many of the passed records were in fact added. This counter is not thread-safe: if - * multiple threads try to add the same records, their combined counters may overrepresent the - * number of records that were actually added. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content or the operation were invalid
  • - *
- */ - public int addRecordIdsToBeDepublished(MetisUserView metisUserView, String datasetId, - String recordIdsInSeparateLines) throws GenericMetisException { - - // Authorize. - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - - // Check and normalize the record IDs. - final Set normalizedRecordIds = checkAndNormalizeRecordIds(datasetId, - recordIdsInSeparateLines); - - // Add the records. - return depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, normalizedRecordIds); - } - - /** - * Deletes a list of record ids from the database. Only record ids that are in a {@link - * eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus#PENDING_DEPUBLICATION} - * state will be removed. - * - * @param metisUserView The user performing this operation. - * @param datasetId The ID of the dataset to which the depublish record ids belong. - * @param recordIdsInSeparateLines The string containing the record IDs in separate lines. - * @return The number or record ids that were removed. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content or the operation were invalid
  • - *
- */ - public Long deletePendingRecordIds(MetisUserView metisUserView, String datasetId, - String recordIdsInSeparateLines) throws GenericMetisException { - - // Authorize. - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - - // Check and normalize the record IDs (Just in case). - final Set normalizedRecordIds = checkAndNormalizeRecordIds(datasetId, - recordIdsInSeparateLines); - - // Delete the records. - return depublishRecordIdDao.deletePendingRecordIds(datasetId, normalizedRecordIds); - } - - /** - * Retrieve the list of depublish record ids for a specific dataset. - *

Ids are retrieved regardless of their status

- * - * @param metisUserView The user performing this operation. Cannot be null. - * @param datasetId The ID of the dataset for which to retrieve the records. Cannot be null. - * @param page The page to retrieve. Cannot be null. - * @param sortField The field on which to sort. Cannot be null. - * @param sortDirection The direction in which to sort. Cannot be null. - * @param searchQuery Search query for the record ID. Can be null. - * @return A list of records. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset for datasetId was not found.
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
- */ - public ResponseListWrapper getDepublishRecordIds( - MetisUserView metisUserView, - String datasetId, int page, DepublishRecordIdSortField sortField, - SortDirection sortDirection, String searchQuery) throws GenericMetisException { - - // Authorize. - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - - // Get the page of records - final List records = depublishRecordIdDao - .getDepublishRecordIds(datasetId, page, sortField, sortDirection, searchQuery); - - // Compile the result - final ResponseListWrapper result = new ResponseListWrapper<>(); - result.setResultsAndLastPage(records, depublishRecordIdDao.getPageSize(), page); - return result; - } - - /** - * Creates a workflow with one plugin {@link eu.europeana.metis.core.workflow.plugins.DepublishPlugin}. - *

The plugin will contain {@link DepublishPluginMetadata} that contain information of whether - * the depublication is for an entire dataset or for individual records ids. Those ids are either - * provided or all of the ids, previously populated, from the database will be used. Only ids in - * {@link eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus#PENDING_DEPUBLICATION} - * will be attempted for depublication.

- * - * @param metisUserView The user performing this operation. Cannot be null. - * @param datasetId The ID of the dataset for which to retrieve the records. Cannot be null. - * @param datasetDepublish true for dataset depublication, false for record depublication - * @param priority the priority of the execution in case the system gets overloaded, 0 lowest, 10 - * highest - * @param recordIdsInSeparateLines the specific pending record ids to depublish. Only record ids - * that are marked as {@link eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus#PENDING_DEPUBLICATION} - * in the database will be attempted for depublication. - * @return the WorkflowExecution object that was generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link BadContentException} if the workflow is empty or no plugin enabled
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if there was an exception when - * contacting the external resource(ECloud)
  • - *
  • {@link eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed} if the execution of - * the first plugin was not allowed, because a valid source plugin could not be found
  • - *
  • {@link eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException} if a - * workflow execution for the generated execution identifier already exists, almost impossible to - * happen since ids are UUIDs
  • - *
- */ - public WorkflowExecution createAndAddInQueueDepublishWorkflowExecution( - MetisUserView metisUserView, - String datasetId, boolean datasetDepublish, int priority, String recordIdsInSeparateLines) - throws GenericMetisException { - // Authorize. - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - - //Prepare depublish workflow, do not save in the database. Only create workflow execution - final Workflow workflow = new Workflow(); - workflow.setDatasetId(datasetId); - final DepublishPluginMetadata depublishPluginMetadata = new DepublishPluginMetadata(); - depublishPluginMetadata.setEnabled(true); - depublishPluginMetadata.setDatasetDepublish(datasetDepublish); - if (StringUtils.isNotBlank(recordIdsInSeparateLines)) { - // Check and normalize the record IDs (Just in case). - final Set normalizedRecordIds = checkAndNormalizeRecordIds(datasetId, - recordIdsInSeparateLines); - depublishPluginMetadata.setRecordIdsToDepublish(normalizedRecordIds); - } - workflow.setMetisPluginsMetadata(Collections.singletonList(depublishPluginMetadata)); - - return orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, datasetId, workflow, null, priority); - } - - /** - * Determines whether a depublication can be triggered at this point. This is the case lf: - *
    - *
  • - * No workflow is currently in progress, and - *
  • - *
  • - * The dataset has the status 'published' (as opposed to 'depublished' or 'neither'), and - *
  • - *
  • - * The records in the dataset are ready for viewing. - *
  • - *
- * - * @param metisUserView The user performing this operation. Cannot be null. - * @param datasetId The ID of the dataset for which to retrieve the records. Cannot be null. - * @return Whether a depublication can be triggered. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoDatasetFoundException} if the dataset - * identifier provided does not exist
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - public boolean canTriggerDepublication(MetisUserView metisUserView, String datasetId) - throws GenericMetisException { - - // Authorize. - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - - // Compute the result. - final boolean result; - if (orchestratorService.getRunningOrInQueueExecution(datasetId) != null) { - // If a workflow execution is currently in progress, we can't depublish. - result = false; - } else { - // If a (re-)index took place recently, or the status is not published, we can't depublish. - final DatasetExecutionInformation executionInformation = orchestratorService - .getDatasetExecutionInformation(datasetId); - result = executionInformation.getPublicationStatus() == PublicationStatus.PUBLISHED && - executionInformation.isLastPublishedRecordsReadyForViewing(); - } - - // Done - return result; - } - - Set checkAndNormalizeRecordIds(String datasetId, - String recordIdsInSeparateLines) throws BadContentException { - return RecordIdUtils.checkAndNormalizeRecordIds(datasetId, recordIdsInSeparateLines); - } - -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/OrchestratorService.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/OrchestratorService.java deleted file mode 100644 index dc6954e8bb..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/OrchestratorService.java +++ /dev/null @@ -1,1036 +0,0 @@ -package eu.europeana.metis.core.service; - -import com.google.common.collect.Sets; -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import 
eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.dao.WorkflowValidationUtils; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation.PublicationStatus; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.exceptions.WorkflowAlreadyExistsException; -import eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException; -import eu.europeana.metis.core.execution.WorkflowExecutorManager; -import eu.europeana.metis.core.rest.ExecutionHistory; -import eu.europeana.metis.core.rest.ExecutionHistory.Execution; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability.PluginWithDataAvailability; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.rest.VersionEvolution; -import eu.europeana.metis.core.rest.VersionEvolution.VersionEvolutionStep; -import eu.europeana.metis.core.rest.execution.details.WorkflowExecutionView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionAndDatasetView; -import eu.europeana.metis.core.workflow.SystemId; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import 
eu.europeana.metis.core.workflow.plugins.AbstractHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.DepublishPlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.DateUtils; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.function.IntConsumer; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.lang.Nullable; -import org.springframework.stereotype.Service; - -/** - * Service class that controls the communication between the different DAOs of the system. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-05-24 - */ -@Service -public class OrchestratorService { - - private static final Logger LOGGER = LoggerFactory.getLogger(OrchestratorService.class); - //Use with String.format to suffix the datasetId - private static final String EXECUTION_FOR_DATASETID_SUBMITION_LOCK = "EXECUTION_FOR_DATASETID_SUBMITION_LOCK_%s"; - - public static final Set HARVEST_TYPES = Sets - .immutableEnumSet(ExecutablePluginType.HTTP_HARVEST, ExecutablePluginType.OAIPMH_HARVEST); - public static final Set EXECUTABLE_PREVIEW_TYPES = Sets - .immutableEnumSet(ExecutablePluginType.PREVIEW); - public static final Set EXECUTABLE_PUBLISH_TYPES = Sets - .immutableEnumSet(ExecutablePluginType.PUBLISH); - public static final Set EXECUTABLE_DEPUBLISH_TYPES = Sets - .immutableEnumSet(ExecutablePluginType.DEPUBLISH); - public static final Set PREVIEW_TYPES = Sets - .immutableEnumSet(PluginType.PREVIEW, PluginType.REINDEX_TO_PREVIEW); - public static final Set PUBLISH_TYPES = Sets - .immutableEnumSet(PluginType.PUBLISH, PluginType.REINDEX_TO_PUBLISH); - public static final Set NO_XML_PREVIEW_TYPES = Sets - .immutableEnumSet(ExecutablePluginType.LINK_CHECKING, ExecutablePluginType.DEPUBLISH); - - private final WorkflowExecutionDao workflowExecutionDao; - private final WorkflowValidationUtils workflowValidationUtils; - private final DataEvolutionUtils dataEvolutionUtils; - private final WorkflowDao workflowDao; - private final DatasetDao datasetDao; - private final WorkflowExecutorManager workflowExecutorManager; - private final RedissonClient redissonClient; - private final Authorizer authorizer; - private final WorkflowExecutionFactory workflowExecutionFactory; - private final DepublishRecordIdDao depublishRecordIdDao; - private int solrCommitPeriodInMins; // Use getter and setter for this field! 
- - /** - * Constructor with all the required parameters - * - * @param workflowExecutionFactory the orchestratorHelper instance - * @param workflowDao the Dao instance to access the Workflow database - * @param workflowExecutionDao the Dao instance to access the WorkflowExecution database - * @param workflowValidationUtils A utilities class providing more functionality on top of DAOs. - * @param dataEvolutionUtils A utilities class providing more functionality on top of DAOs. - * @param datasetDao the Dao instance to access the Dataset database - * @param workflowExecutorManager the instance that handles the production and consumption of workflowExecutions - * @param redissonClient the instance of Redisson library that handles distributed locks - * @param authorizer the authorizer - * @param depublishRecordIdDao the Dao instance to access the DepublishRecordId database - */ - @Autowired - public OrchestratorService(WorkflowExecutionFactory workflowExecutionFactory, - WorkflowDao workflowDao, WorkflowExecutionDao workflowExecutionDao, - WorkflowValidationUtils workflowValidationUtils, DataEvolutionUtils dataEvolutionUtils, - DatasetDao datasetDao, WorkflowExecutorManager workflowExecutorManager, - RedissonClient redissonClient, Authorizer authorizer, - DepublishRecordIdDao depublishRecordIdDao) { - this.workflowExecutionFactory = workflowExecutionFactory; - this.workflowDao = workflowDao; - this.workflowExecutionDao = workflowExecutionDao; - this.workflowValidationUtils = workflowValidationUtils; - this.dataEvolutionUtils = dataEvolutionUtils; - this.datasetDao = datasetDao; - this.workflowExecutorManager = workflowExecutorManager; - this.redissonClient = redissonClient; - this.authorizer = authorizer; - this.depublishRecordIdDao = depublishRecordIdDao; - } - - /** - * Create a workflow using a datasetId and the {@link Workflow} that contains the requested plugins. If plugins are disabled, - * they (their settings) are still saved. 
- * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the identifier of the dataset for which the workflow should be created - * @param workflow the workflow with the plugins requested - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link WorkflowAlreadyExistsException} if a workflow for the dataset identifier provided - * already exists
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
  • {@link BadContentException} if the workflow parameters have unexpected values
  • - *
- */ - public void createWorkflow(MetisUserView metisUserView, String datasetId, Workflow workflow, - ExecutablePluginType enforcedPredecessorType) throws GenericMetisException { - - // Authorize (check dataset existence) and set dataset ID to avoid discrepancy. - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - workflow.setDatasetId(datasetId); - - // Check that the workflow does not yet exist. - if (workflowDao.workflowExistsForDataset(workflow.getDatasetId())) { - throw new WorkflowAlreadyExistsException( - String.format("Workflow with datasetId: %s, already exists", workflow.getDatasetId())); - } - - // Validate the new workflow. - workflowValidationUtils.validateWorkflowPlugins(workflow, enforcedPredecessorType); - - // Save the workflow. - workflowDao.create(workflow); - } - - /** - * Update an already existent workflow using a datasetId and the {@link Workflow} that contains the requested plugins. If - * plugins are disabled, they (their settings) are still saved. Any settings in plugins that are not sent in the request are - * removed. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the identifier of the dataset for which the workflow should be updated - * @param workflow the workflow with the plugins requested - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoWorkflowFoundException} if a workflow for the dataset identifier provided does - * not exist
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
  • {@link BadContentException} if the workflow parameters have unexpected values
  • - *
- */ - public void updateWorkflow(MetisUserView metisUserView, String datasetId, Workflow workflow, - ExecutablePluginType enforcedPredecessorType) throws GenericMetisException { - - // Authorize (check dataset existence) and set dataset ID to avoid discrepancy. - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - workflow.setDatasetId(datasetId); - - // Get the current workflow in the database. If it doesn't exist, throw exception. - final Workflow storedWorkflow = workflowDao.getWorkflow(workflow.getDatasetId()); - if (storedWorkflow == null) { - throw new NoWorkflowFoundException( - String.format("Workflow with datasetId: %s, not found", workflow.getDatasetId())); - } - - // Validate the new workflow. - workflowValidationUtils.validateWorkflowPlugins(workflow, enforcedPredecessorType); - - // Overwrite the workflow. - workflow.setId(storedWorkflow.getId()); - workflowDao.update(workflow); - } - - /** - * Deletes a workflow. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier that corresponds to the workflow to be deleted - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public void deleteWorkflow(MetisUserView metisUserView, String datasetId) throws GenericMetisException { - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - workflowDao.deleteWorkflow(datasetId); - } - - /** - * Get a workflow for a dataset identifier. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier - * @return the Workflow object - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public Workflow getWorkflow(MetisUserView metisUserView, String datasetId) throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - return workflowDao.getWorkflow(datasetId); - } - - /** - * Get a WorkflowExecution using an execution identifier. - * - * @param metisUserView the user wishing to perform this operation - * @param executionId the execution identifier - * @return the WorkflowExecution object - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public WorkflowExecution getWorkflowExecutionByExecutionId(MetisUserView metisUserView, - String executionId) throws GenericMetisException { - final WorkflowExecution result = workflowExecutionDao.getById(executionId); - if (result != null) { - authorizer.authorizeReadExistingDatasetById(metisUserView, result.getDatasetId()); - } - return result; - } - - /** - *

Does checking, prepares and adds a WorkflowExecution in the queue. That means it updates - * the status of the WorkflowExecution to {@link WorkflowStatus#INQUEUE}, adds it to the database and also it's identifier goes - * into the distributed queue of WorkflowExecutions. The source data for the first plugin in the workflow can be controlled, if - * required, from the {@code enforcedPredecessorType}, which means that the last valid plugin that is provided with that - * parameter, will be used as the source data.

- *

Please note: this method is not checked for authorization: it is only meant to be - * called from a scheduled task.

- * - * @param datasetId the dataset identifier for which the execution will take place - * @param workflowProvided optional, the workflow to use instead of retrieving the saved one from the db - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @param priority the priority of the execution in case the system gets overloaded, 0 lowest, 10 highest - * @return the WorkflowExecution object that was generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoWorkflowFoundException} if a workflow for the dataset identifier provided does - * not exist
  • - *
  • {@link BadContentException} if the workflow is empty or no plugin enabled
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link ExternalTaskException} if there was an exception when contacting the external - * resource(ECloud)
  • - *
  • {@link PluginExecutionNotAllowed} if the execution of the first plugin was not allowed, - * because a valid source plugin could not be found
  • - *
  • {@link WorkflowExecutionAlreadyExistsException} if a workflow execution for the generated - * execution identifier already exists, almost impossible to happen since ids are UUIDs
  • - *
- */ - public WorkflowExecution addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization( - String datasetId, @Nullable Workflow workflowProvided, - @Nullable ExecutablePluginType enforcedPredecessorType, int priority) - throws GenericMetisException { - final Dataset dataset = datasetDao.getDatasetByDatasetId(datasetId); - if (dataset == null) { - throw new NoDatasetFoundException( - String.format("No dataset found with datasetId: %s, in METIS", datasetId)); - } - return addWorkflowInQueueOfWorkflowExecutions(dataset, workflowProvided, - enforcedPredecessorType, priority, null); - } - - /** - * Does checking, prepares and adds a WorkflowExecution in the queue. That means it updates the status of the WorkflowExecution - * to {@link WorkflowStatus#INQUEUE}, adds it to the database and also it's identifier goes into the distributed queue of - * WorkflowExecutions. The source data for the first plugin in the workflow can be controlled, if required, from the {@code - * enforcedPredecessorType}, which means that the last valid plugin that is provided with that parameter, will be used as the - * source data. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier for which the execution will take place - * @param workflowProvided optional, the workflow to use instead of retrieving the saved one from the db - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @param priority the priority of the execution in case the system gets overloaded, 0 lowest, 10 highest - * @return the WorkflowExecution object that was generated - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoWorkflowFoundException} if a workflow for the dataset identifier provided does - * not exist
  • - *
  • {@link BadContentException} if the workflow is empty or no plugin enabled
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
  • {@link ExternalTaskException} if there was an exception when contacting the external - * resource(ECloud)
  • - *
  • {@link PluginExecutionNotAllowed} if the execution of the first plugin was not allowed, - * because a valid source plugin could not be found
  • - *
  • {@link WorkflowExecutionAlreadyExistsException} if a workflow execution for the generated - * execution identifier already exists, almost impossible to happen since ids are UUIDs
  • - *
- */ - public WorkflowExecution addWorkflowInQueueOfWorkflowExecutions(MetisUserView metisUserView, - String datasetId, @Nullable Workflow workflowProvided, - @Nullable ExecutablePluginType enforcedPredecessorType, - int priority) - throws GenericMetisException { - final Dataset dataset = authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - return addWorkflowInQueueOfWorkflowExecutions(dataset, workflowProvided, - enforcedPredecessorType, priority, metisUserView); - } - - private WorkflowExecution addWorkflowInQueueOfWorkflowExecutions(Dataset dataset, - @Nullable Workflow workflowProvided, - @Nullable ExecutablePluginType enforcedPredecessorType, - int priority, MetisUserView metisUserView) - throws GenericMetisException { - - // Get the workflow or use the one provided. - final Workflow workflow; - if (Objects.isNull(workflowProvided)) { - workflow = workflowDao.getWorkflow(dataset.getDatasetId()); - } else { - workflow = workflowProvided; - } - if (workflow == null) { - throw new NoWorkflowFoundException( - String.format("No workflow found with datasetId: %s, in METIS", dataset.getDatasetId())); - } - - // Validate the workflow and obtain the predecessor. - final PluginWithExecutionId predecessor = workflowValidationUtils - .validateWorkflowPlugins(workflow, enforcedPredecessorType); - - // Make sure that eCloud knows tmetisUserhis dataset (needs to happen before we create the workflow). - datasetDao.checkAndCreateDatasetInEcloud(dataset); - - // Create the workflow execution (without adding it to the database). - final WorkflowExecution workflowExecution = workflowExecutionFactory - .createWorkflowExecution(workflow, dataset, predecessor, priority); - - // Obtain the lock. - RLock executionDatasetIdLock = redissonClient - .getFairLock(String.format(EXECUTION_FOR_DATASETID_SUBMITION_LOCK, dataset.getDatasetId())); - executionDatasetIdLock.lock(); - - // Add the workflow execution to the database. Then release the lock. 
- final String objectId; - try { - String storedWorkflowExecutionId = workflowExecutionDao - .existsAndNotCompleted(dataset.getDatasetId()); - if (storedWorkflowExecutionId != null) { - throw new WorkflowExecutionAlreadyExistsException(String - .format("Workflow execution already exists with id %s and is not completed", - storedWorkflowExecutionId)); - } - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - if (metisUserView == null || metisUserView.getUserId() == null) { - workflowExecution.setStartedBy(SystemId.STARTED_BY_SYSTEM.name()); - } else { - workflowExecution.setStartedBy(metisUserView.getUserId()); - } - workflowExecution.setCreatedDate(new Date()); - objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - } finally { - executionDatasetIdLock.unlock(); - } - - // Add the workflow execution to the queue. - workflowExecutorManager.addWorkflowExecutionToQueue(objectId, priority); - LOGGER.info("WorkflowExecution with id: {}, added to execution queue", objectId); - - // Done. Get a fresh copy of the workflow execution to return. - return workflowExecutionDao.getById(objectId); - } - - /** - * Request to cancel a workflow execution. The execution will go into a cancelling state until it's properly {@link - * WorkflowStatus#CANCELLED} from the system - * - * @param metisUserView the user wishing to perform this operation - * @param executionId the execution identifier of the execution to cancel - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoWorkflowExecutionFoundException} if no worklfowExecution could be found
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public void cancelWorkflowExecution(MetisUserView metisUserView, String executionId) - throws GenericMetisException { - - WorkflowExecution workflowExecution = workflowExecutionDao.getById(executionId); - if (workflowExecution != null) { - authorizer.authorizeWriteExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - } - if (workflowExecution != null && ( - workflowExecution.getWorkflowStatus() == WorkflowStatus.RUNNING - || workflowExecution.getWorkflowStatus() == WorkflowStatus.INQUEUE)) { - workflowExecutionDao.setCancellingState(workflowExecution, metisUserView); - LOGGER.info("Cancelling user workflow execution with id: {}", workflowExecution.getId()); - } else { - throw new NoWorkflowExecutionFoundException(String - .format("Running workflowExecution with executionId: %s, does not exist or not active", - executionId)); - } - } - - /** - * The number of WorkflowExecutions that would be returned if a get all request would be performed. - * - * @return the number representing the size during a get all request - */ - public int getWorkflowExecutionsPerRequest() { - return workflowExecutionDao.getWorkflowExecutionsPerRequest(); - } - - /** - * Check if a specified {@code pluginType} is allowed for execution. This is checked based on, if there was a previous - * successful finished plugin that follows a specific order (unless the {@code enforcedPredecessorType} is used) and that has - * the latest successful harvest plugin as an ancestor. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier of which the executions are based on - * @param pluginType the pluginType to be checked for allowance of execution - * @param enforcedPredecessorType optional, the plugin type to be used as source data - * @return the abstractMetisPlugin that the execution on {@code pluginType} will be based on. Can be null if the {@code - * pluginType} is the first one in the total order of executions e.g. 
One of the harvesting plugins. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link PluginExecutionNotAllowed} if the no plugin was found so the {@code pluginType} - * will be based upon
  • - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public ExecutablePlugin getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution( - MetisUserView metisUserView, String datasetId, ExecutablePluginType pluginType, - ExecutablePluginType enforcedPredecessorType) throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - return Optional.ofNullable( - dataEvolutionUtils.computePredecessorPlugin(pluginType, enforcedPredecessorType, datasetId)) - .map(PluginWithExecutionId::getPlugin).orElse(null); - } - - /** - * Get all WorkflowExecutions paged. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier filter, can be null to get all datasets - * @param workflowStatuses a set of workflow statuses to filter, can be empty or null - * @param orderField the field to be used to sort the results - * @param ascending a boolean value to request the ordering to ascending or descending - * @param nextPage the nextPage token - * @return A list of all the WorkflowExecutions found. If the user is not admin, the list is filtered to only show those - * executions that are in the user's organization. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public ResponseListWrapper getAllWorkflowExecutions( - MetisUserView metisUserView, - String datasetId, Set workflowStatuses, DaoFieldNames orderField, - boolean ascending, int nextPage) throws GenericMetisException { - - // Authorize - if (datasetId == null) { - authorizer.authorizeReadAllDatasets(metisUserView); - } else { - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - } - - // Determine the dataset IDs to filter on. - final Set datasetIds; - if (datasetId == null) { - datasetIds = getDatasetIdsToFilterOn(metisUserView); - } else { - datasetIds = Collections.singleton(datasetId); - } - - // Find the executions. - final ResultList data = workflowExecutionDao - .getAllWorkflowExecutions(datasetIds, workflowStatuses, orderField, ascending, nextPage, 1, - false); - - // Compile and return the result. - final List convertedData = data.getResults().stream().map( - execution -> new WorkflowExecutionView(execution, isIncremental(execution), - OrchestratorService::canDisplayRawXml)).toList(); - final ResponseListWrapper result = new ResponseListWrapper<>(); - result.setResultsAndLastPage(convertedData, getWorkflowExecutionsPerRequest(), nextPage, - data.isMaxResultCountReached()); - return result; - } - - /** - * Checks if a workflow execution is an incremental one based on root ancestor information - * - * @param workflowExecution the workflow execution to check - * @return true if incremental, false otherwise - */ - private boolean isIncremental(WorkflowExecution workflowExecution) { - final AbstractMetisPlugin firstPluginInList = workflowExecution.getMetisPlugins().getFirst(); - // Non-executable plugins are not to be checked - if (!(firstPluginInList instanceof AbstractExecutablePlugin)) { - return false; - } - - final ExecutablePlugin harvestPlugin = new DataEvolutionUtils(workflowExecutionDao) - .getRootAncestor(new PluginWithExecutionId<>(workflowExecution, - ((AbstractExecutablePlugin) firstPluginInList))) - .getPlugin(); - - // 
depublication can also be a root ancestor. - if (harvestPlugin.getPluginMetadata().getExecutablePluginType() - == ExecutablePluginType.DEPUBLISH) { - return false; - } - - // Check the harvesting types - if (!DataEvolutionUtils.getHarvestPluginGroup() - .contains(harvestPlugin.getPluginMetadata().getExecutablePluginType())) { - throw new IllegalStateException(String.format( - "workflowExecutionId: %s, pluginId: %s - Found plugin root that is not a harvesting plugin.", - workflowExecution.getId(), harvestPlugin.getId())); - } - return (harvestPlugin.getPluginMetadata() instanceof AbstractHarvestPluginMetadata abstractHarvestPluginMetadata) - && abstractHarvestPluginMetadata.isIncrementalHarvest(); - } - - /** - * Get the overview of WorkflowExecutions. This returns a list of executions ordered to display an overview. First the ones in - * queue, then those in progress and then those that are finalized. They will be sorted by creation date. This method does - * support pagination. - * - * @param metisUserView the user wishing to perform this operation - * @param pluginStatuses the plugin statuses to filter. Can be null. - * @param pluginTypes the plugin types to filter. Can be null. - * @param fromDate the date from where the results should start. Can be null. - * @param toDate the date to where the results should end. Can be null. - * @param nextPage the nextPage token, the end of the list is marked with -1 on the response - * @param pageCount the number of pages that are requested - * @return a list of all the WorkflowExecutions together with the datasets that they belong to. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - public ResponseListWrapper getWorkflowExecutionsOverview( - MetisUserView metisUserView, Set pluginStatuses, Set pluginTypes, - Date fromDate, Date toDate, int nextPage, int pageCount) throws GenericMetisException { - authorizer.authorizeReadAllDatasets(metisUserView); - final Set datasetIds = getDatasetIdsToFilterOn(metisUserView); - final ResultList resultList; - if (datasetIds == null || !datasetIds.isEmpty()) { - //Match results filtering using specified dataset ids or without dataset id filter if it's null - resultList = workflowExecutionDao - .getWorkflowExecutionsOverview(datasetIds, pluginStatuses, pluginTypes, fromDate, toDate, - nextPage, pageCount); - } else { - //Result should be empty if dataset set is empty - resultList = new ResultList<>(Collections.emptyList(), false); - } - final List views = resultList.getResults().stream() - .map(result -> new ExecutionAndDatasetView(result.getExecution(), result.getDataset())) - .toList(); - final ResponseListWrapper result = new ResponseListWrapper<>(); - result.setResultsAndLastPage(views, getWorkflowExecutionsPerRequest(), nextPage, pageCount, - resultList.isMaxResultCountReached()); - return result; - } - - /** - * Get the list of dataset ids that the provided user owns. - *

The return value can be one of the following: - *

    - *
  • null when a user has role {@link AccountRole#METIS_ADMIN}, which means the user owns everything
  • - *
  • Empty set if the user owns nothing
  • - *
  • Non-Empty set with the dataset ids that the user owns, for users that have a role other than {@link AccountRole#METIS_ADMIN}
  • - *
- *

- * - * @param metisUserView the user to use for getting the owned dataset ids - * @return a set of dataset ids - */ - private Set getDatasetIdsToFilterOn(MetisUserView metisUserView) { - final Set datasetIds; - if (metisUserView.getAccountRole() == AccountRole.METIS_ADMIN) { - datasetIds = null; - } else { - datasetIds = datasetDao.getAllDatasetsByOrganizationId(metisUserView.getOrganizationId()).stream() - .map(Dataset::getDatasetId).collect(Collectors.toSet()); - } - return datasetIds; - } - - /** - * Retrieve dataset level information of past executions {@link DatasetExecutionInformation} - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier to generate the information for - * @return the structured class containing all the execution information - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public DatasetExecutionInformation getDatasetExecutionInformation(MetisUserView metisUserView, - String datasetId) throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - return getDatasetExecutionInformation(datasetId); - } - - DatasetExecutionInformation getDatasetExecutionInformation(String datasetId) { - - // Obtain the relevant parts of the execution history - final ExecutablePlugin lastHarvestPlugin = Optional.ofNullable( - workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, HARVEST_TYPES, false)) - .map(PluginWithExecutionId::getPlugin).orElse(null); - final PluginWithExecutionId firstPublishPluginWithExecutionId = workflowExecutionDao - .getFirstSuccessfulPlugin(datasetId, PUBLISH_TYPES); - final MetisPlugin firstPublishPlugin = firstPublishPluginWithExecutionId == null ? null - : firstPublishPluginWithExecutionId.getPlugin(); - final ExecutablePlugin lastExecutablePreviewPlugin = Optional.ofNullable(workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(datasetId, EXECUTABLE_PREVIEW_TYPES, false)) - .map(PluginWithExecutionId::getPlugin).orElse(null); - final ExecutablePlugin lastExecutablePublishPlugin = Optional.ofNullable(workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(datasetId, EXECUTABLE_PUBLISH_TYPES, false)) - .map(PluginWithExecutionId::getPlugin).orElse(null); - final PluginWithExecutionId latestPreviewPluginWithExecutionId = workflowExecutionDao - .getLatestSuccessfulPlugin(datasetId, PREVIEW_TYPES); - final PluginWithExecutionId latestPublishPluginWithExecutionId = workflowExecutionDao - .getLatestSuccessfulPlugin(datasetId, PUBLISH_TYPES); - final MetisPlugin lastPreviewPlugin = latestPreviewPluginWithExecutionId == null ? null - : latestPreviewPluginWithExecutionId.getPlugin(); - final MetisPlugin lastPublishPlugin = latestPublishPluginWithExecutionId == null ? 
null - : latestPublishPluginWithExecutionId.getPlugin(); - final ExecutablePlugin lastExecutableDepublishPlugin = Optional.ofNullable(workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(datasetId, EXECUTABLE_DEPUBLISH_TYPES, false)) - .map(PluginWithExecutionId::getPlugin).orElse(null); - - // Obtain the relevant current executions - final WorkflowExecution runningOrInQueueExecution = getRunningOrInQueueExecution(datasetId); - final boolean isPreviewCleaningOrRunning = isPluginInWorkflowCleaningOrRunning( - runningOrInQueueExecution, PREVIEW_TYPES); - final boolean isPublishCleaningOrRunning = isPluginInWorkflowCleaningOrRunning( - runningOrInQueueExecution, PUBLISH_TYPES); - - final DatasetExecutionInformation executionInfo = new DatasetExecutionInformation(); - // Set the last harvest information - if (Objects.nonNull(lastHarvestPlugin)) { - executionInfo.setLastHarvestedDate(lastHarvestPlugin.getFinishedDate()); - executionInfo.setLastHarvestedRecords( - lastHarvestPlugin.getExecutionProgress().getProcessedRecords() - lastHarvestPlugin - .getExecutionProgress().getErrors()); - } - final Date now = new Date(); - setPreviewInformation(executionInfo, lastExecutablePreviewPlugin, lastPreviewPlugin, - isPreviewCleaningOrRunning, now); - setPublishInformation(executionInfo, firstPublishPlugin, lastExecutablePublishPlugin, - lastPublishPlugin, lastExecutableDepublishPlugin, isPublishCleaningOrRunning, now, - datasetId); - - return executionInfo; - } - - WorkflowExecution getRunningOrInQueueExecution(String datasetId) { - return workflowExecutionDao.getRunningOrInQueueExecution(datasetId); - } - - private void setPreviewInformation(DatasetExecutionInformation executionInfo, - ExecutablePlugin lastExecutablePreviewPlugin, MetisPlugin lastPreviewPlugin, - boolean isPreviewCleaningOrRunning, Date date) { - - boolean lastPreviewHasDeletedRecords = computeRecordCountsAndCheckDeletedRecords(lastExecutablePreviewPlugin, - executionInfo::setLastPreviewRecords, 
executionInfo::setTotalPreviewRecords); - - //Compute more general information of the plugin - if (Objects.nonNull(lastPreviewPlugin)) { - executionInfo.setLastPreviewDate(lastPreviewPlugin.getFinishedDate()); - //Check if we have information about the total records - final boolean recordsAvailable; - if (executionInfo.getTotalPreviewRecords() > 0) { - recordsAvailable = true; - } else if (executionInfo.getTotalPreviewRecords() == 0) { - recordsAvailable = false; - } else { - recordsAvailable = executionInfo.getLastPreviewRecords() > 0 || lastPreviewHasDeletedRecords; - } - - executionInfo.setLastPreviewRecordsReadyForViewing(recordsAvailable && - !isPreviewCleaningOrRunning && isPreviewOrPublishReadyForViewing(lastPreviewPlugin, - date)); - } - } - - private void setPublishInformation(DatasetExecutionInformation executionInfo, - MetisPlugin firstPublishPlugin, ExecutablePlugin lastExecutablePublishPlugin, - MetisPlugin lastPublishPlugin, ExecutablePlugin lastExecutableDepublishPlugin, - boolean isPublishCleaningOrRunning, Date date, String datasetId) { - - // Set the first publication information - executionInfo.setFirstPublishedDate( - firstPublishPlugin == null ? 
null : firstPublishPlugin.getFinishedDate()); - - // Determine the depublication situation of the dataset - final boolean datasetCurrentlyDepublished = isDatasetCurrentlyDepublished(lastExecutablePublishPlugin, - lastExecutableDepublishPlugin); - - boolean lastPublishHasDeletedRecords = computeRecordCountsAndCheckDeletedRecords(lastExecutablePublishPlugin, - executionInfo::setLastPublishedRecords, executionInfo::setTotalPublishedRecords); - - //Compute depublish count - final int depublishedRecordCount; - if (datasetCurrentlyDepublished) { - depublishedRecordCount = executionInfo.getLastPublishedRecords(); - } else { - depublishedRecordCount = (int) depublishRecordIdDao - .countSuccessfullyDepublishedRecordIdsForDataset(datasetId); - } - - //Compute more general information of the plugin - if (Objects.nonNull(lastPublishPlugin)) { - executionInfo.setLastPublishedDate(lastPublishPlugin.getFinishedDate()); - - //Check if we have information about the total records - final boolean recordsAvailable; - if (executionInfo.getTotalPublishedRecords() > 0) { - recordsAvailable = true; - } else if (executionInfo.getTotalPublishedRecords() == 0) { - recordsAvailable = false; - } else { - recordsAvailable = - !datasetCurrentlyDepublished && (executionInfo.getLastPublishedRecords() > depublishedRecordCount - || lastPublishHasDeletedRecords); - } - executionInfo.setLastPublishedRecordsReadyForViewing( - recordsAvailable && !isPublishCleaningOrRunning && isPreviewOrPublishReadyForViewing( - lastPublishPlugin, date)); - } - - // Set the last depublished information. - executionInfo.setLastDepublishedRecords(depublishedRecordCount); - if (Objects.nonNull(lastExecutableDepublishPlugin)) { - executionInfo.setLastDepublishedDate(lastExecutableDepublishPlugin.getFinishedDate()); - } - - // Set the publication status. 
- final PublicationStatus status; - if (datasetCurrentlyDepublished) { - status = PublicationStatus.DEPUBLISHED; - } else if (lastExecutablePublishPlugin != null) { - status = PublicationStatus.PUBLISHED; - } else { - status = null; - } - executionInfo.setPublicationStatus(status); - } - - private static boolean isDatasetCurrentlyDepublished(ExecutablePlugin lastExecutablePublishPlugin, - ExecutablePlugin lastExecutableDepublishPlugin) { - final boolean depublishHappenedAfterLatestExecutablePublish = - lastExecutableDepublishPlugin != null && lastExecutablePublishPlugin != null && - lastExecutablePublishPlugin.getFinishedDate().compareTo(lastExecutableDepublishPlugin.getFinishedDate()) < 0; - /* TODO JV below we use the fact that a record depublish cannot follow a dataset depublish (so - we don't have to look further into the past for all depublish actions after the last - publish). We should make this code more robust by not assuming that here. */ - return depublishHappenedAfterLatestExecutablePublish - && (lastExecutableDepublishPlugin instanceof DepublishPlugin depublishPlugin) - && depublishPlugin.getPluginMetadata().isDatasetDepublish(); - } - - private boolean computeRecordCountsAndCheckDeletedRecords(ExecutablePlugin executablePlugin, IntConsumer lastRecordsSetter, - IntConsumer totalRecordsSetter) { - int recordCount = 0; - int totalRecordCount = -1; - boolean hasDeletedRecords = false; - if (Objects.nonNull(executablePlugin)) { - recordCount = executablePlugin.getExecutionProgress().getProcessedRecords() - - executablePlugin.getExecutionProgress().getErrors(); - totalRecordCount = executablePlugin.getExecutionProgress().getTotalDatabaseRecords(); - hasDeletedRecords = executablePlugin.getExecutionProgress().getDeletedRecords() > 0; - } - lastRecordsSetter.accept(recordCount); - totalRecordsSetter.accept(totalRecordCount); - return hasDeletedRecords; - } - - private boolean isPreviewOrPublishReadyForViewing(MetisPlugin plugin, Date now) { - final boolean 
dataIsValid = !(plugin instanceof ExecutablePlugin executablePlugin) - || MetisPlugin.getDataStatus(executablePlugin) == DataStatus.VALID; - final boolean enoughTimeHasPassed = getSolrCommitPeriodInMins() < DateUtils - .calculateDateDifference(plugin.getFinishedDate(), now, TimeUnit.MINUTES); - return dataIsValid && enoughTimeHasPassed; - } - - private boolean isPluginInWorkflowCleaningOrRunning(WorkflowExecution runningOrInQueueExecution, - Set pluginTypes) { - return runningOrInQueueExecution != null && runningOrInQueueExecution.getMetisPlugins().stream() - .filter(metisPlugin -> pluginTypes.contains(metisPlugin.getPluginType())) - .map(AbstractMetisPlugin::getPluginStatus).anyMatch( - pluginStatus -> pluginStatus == PluginStatus.CLEANING - || pluginStatus == PluginStatus.RUNNING); - } - - /** - * Retrieve dataset level history of past executions {@link DatasetExecutionInformation} - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId the dataset identifier to generate the history for - * @return the structured class containing all the execution history, ordered by date descending. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public ExecutionHistory getDatasetExecutionHistory(MetisUserView metisUserView, String datasetId) - throws GenericMetisException { - - // Check that the user is authorized - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - - // Get the executions from the database - final ResultList allExecutions = workflowExecutionDao - .getAllWorkflowExecutions(Set.of(datasetId), null, DaoFieldNames.STARTED_DATE, false, 0, - null, false); - - // Filter the executions. - final List executions = allExecutions.getResults().stream().filter( - entry -> entry.getMetisPlugins().stream().anyMatch(OrchestratorService::canDisplayRawXml)) - .map(OrchestratorService::convert).toList(); - - // Done - final ExecutionHistory result = new ExecutionHistory(); - result.setExecutions(executions); - return result; - } - - private static Execution convert(WorkflowExecution execution) { - final Execution result = new Execution(); - result.setWorkflowExecutionId(execution.getId().toString()); - result.setStartedDate(execution.getStartedDate()); - return result; - } - - /** - * Retrieve a list of plugins with data availability {@link PluginsWithDataAvailability} for a given workflow execution. - * - * @param metisUserView the user wishing to perform this operation - * @param executionId the identifier of the execution for which to get the plugins - * @return the structured class containing all the execution history, ordered by date descending. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if an - * non-existing execution ID or version is provided.
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - public PluginsWithDataAvailability getExecutablePluginsWithDataAvailability( - MetisUserView metisUserView, - String executionId) throws GenericMetisException { - - // Get the execution and do the authorization check. - final WorkflowExecution execution = getWorkflowExecutionByExecutionId(metisUserView, executionId); - if (execution == null) { - throw new NoWorkflowExecutionFoundException( - String.format("No workflow execution found for workflowExecutionId: %s", executionId)); - } - - // Compile the result. - final List plugins = execution.getMetisPlugins().stream() - .filter(OrchestratorService::canDisplayRawXml).map(OrchestratorService::convert) - .toList(); - final PluginsWithDataAvailability result = new PluginsWithDataAvailability(); - result.setPlugins(plugins); - - // Done. - return result; - } - - private static PluginWithDataAvailability convert(MetisPlugin plugin) { - final PluginWithDataAvailability result = new PluginWithDataAvailability(); - result.setCanDisplayRawXml(true); // If this method is called, it is known that it can display. 
- result.setPluginType(plugin.getPluginType()); - return result; - } - - private static boolean canDisplayRawXml(MetisPlugin plugin) { - final boolean result; - if (plugin instanceof ExecutablePlugin executablePlugin) { - final boolean dataIsValid = - MetisPlugin.getDataStatus(executablePlugin) == DataStatus.VALID; - final ExecutionProgress progress = executablePlugin.getExecutionProgress(); - final boolean pluginHasBlacklistedType = Optional.of(executablePlugin) - .map(ExecutablePlugin::getPluginMetadata) - .map(ExecutablePluginMetadata::getExecutablePluginType) - .map(NO_XML_PREVIEW_TYPES::contains).orElse(Boolean.TRUE); - result = dataIsValid && !pluginHasBlacklistedType && progress != null - && progress.getProcessedRecords() > progress.getErrors(); - } else { - result = false; - } - return result; - } - - /** - * Get the evolution of the records from when they were first imported until (and excluding) the specified version. - * - * @param metisUserView the user wishing to perform this operation - * @param executionId The ID of the workflow exection in which the version is created. - * @param pluginType The step within the workflow execution that created the version. - * @return The record evolution. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if an - * non-existing execution ID or version is provided.
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authenticated or authorized to perform this operation
  • - *
- */ - public VersionEvolution getRecordEvolutionForVersion(MetisUserView metisUserView, String executionId, - PluginType pluginType) throws GenericMetisException { - - // Get the execution and do the authorization check. - final WorkflowExecution execution = getWorkflowExecutionByExecutionId(metisUserView, executionId); - if (execution == null) { - throw new NoWorkflowExecutionFoundException( - String.format("No workflow execution found for workflowExecutionId: %s", executionId)); - } - - // Find the plugin (workflow step) in question. - final AbstractMetisPlugin targetPlugin = execution.getMetisPluginWithType(pluginType) - .orElseThrow(() -> new NoWorkflowExecutionFoundException(String - .format("No plugin of type %s found for workflowExecution with id: %s", - pluginType.name(), execution))); - - // Compile the version evolution. - final Collection> evolutionSteps = dataEvolutionUtils - .compileVersionEvolution(targetPlugin, execution); - final VersionEvolution versionEvolution = new VersionEvolution(); - versionEvolution.setEvolutionSteps(evolutionSteps.stream().map(step -> { - final VersionEvolutionStep evolutionStep = new VersionEvolutionStep(); - final ExecutablePlugin plugin = step.getLeft(); - evolutionStep.setWorkflowExecutionId(step.getRight().getId().toString()); - evolutionStep.setPluginType(plugin.getPluginMetadata().getExecutablePluginType()); - evolutionStep.setFinishedTime(plugin.getFinishedDate()); - return evolutionStep; - }).toList()); - return versionEvolution; - } - - /** - * This method returns whether currently it is permitted/possible to perform incremental harvesting for the given dataset. - * - * @param metisUserView the user wishing to perform this operation - * @param datasetId The ID of the dataset for which to check. - * @return Whether we can perform incremental harvesting for the dataset. - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset identifier provided does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is not authorized to perform this task
  • - *
- */ - public boolean isIncrementalHarvestingAllowed(MetisUserView metisUserView, String datasetId) - throws GenericMetisException { - - // Check that the user is authorized - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - - // Do the check. - return workflowValidationUtils.isIncrementalHarvestingAllowed(datasetId); - } - - public int getSolrCommitPeriodInMins() { - synchronized (this) { - return solrCommitPeriodInMins; - } - } - - public void setSolrCommitPeriodInMins(int solrCommitPeriodInMins) { - synchronized (this) { - this.solrCommitPeriodInMins = solrCommitPeriodInMins; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesHelper.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesHelper.java deleted file mode 100644 index 1a52767ed9..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesHelper.java +++ /dev/null @@ -1,96 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.cloud.common.model.dps.NodeReport; -import eu.europeana.cloud.common.model.dps.NodeStatistics; -import eu.europeana.cloud.common.model.dps.StatisticsReport; -import eu.europeana.metis.core.rest.stats.AttributeStatistics; -import eu.europeana.metis.core.rest.stats.NodePathStatistics; -import eu.europeana.metis.core.rest.stats.NodeValueStatistics; -import eu.europeana.metis.core.rest.stats.RecordStatistics; -import java.util.Collection; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.function.Function; -import java.util.stream.Collectors; - -class ProxiesHelper { - - ProxiesHelper() { - } - - RecordStatistics compileRecordStatistics(StatisticsReport report) { - - // Group the node statistics by their respective xpath. 
- final Map> nodesByXPath = report.getNodeStatistics().stream() - .collect(Collectors.groupingBy(NodeStatistics::getXpath)); - final List nodePathStatisticsList = nodesByXPath.entrySet().stream() - .map(ProxiesHelper::compileNodePathStatistics) - .sorted(Comparator.comparing(NodePathStatistics::getxPath)) - .toList(); - - // Done. - final RecordStatistics result = new RecordStatistics(); - result.setNodePathStatistics(nodePathStatisticsList); - result.setTaskId(report.getTaskId()); - return result; - } - - private static NodePathStatistics compileNodePathStatistics( - Entry> nodeWithXPath) { - return compileNodePathStatistics(nodeWithXPath.getKey(), nodeWithXPath.getValue(), - ProxiesHelper::compileNodeValueStatistics); - } - - NodePathStatistics compileNodePathStatistics(String nodePath, List nodeReports) { - return compileNodePathStatistics(nodePath, nodeReports, - ProxiesHelper::compileNodeValueStatistics); - } - - private static NodePathStatistics compileNodePathStatistics(String nodePath, - List nodes, Function nodeValueConverter) { - final List nodeValueStatisticsList = nodes.stream() - .map(nodeValueConverter) - .sorted(Comparator.comparing(NodeValueStatistics::getValue)) - .toList(); - final NodePathStatistics nodePathStatistics = new NodePathStatistics(); - nodePathStatistics.setxPath(nodePath); - nodePathStatistics.setNodeValueStatistics(nodeValueStatisticsList); - return nodePathStatistics; - } - - private static NodeValueStatistics compileNodeValueStatistics(NodeStatistics nodeStatistics) { - return compileNodeValueStatistics(nodeStatistics.getValue(), nodeStatistics.getOccurrence(), - nodeStatistics.getAttributesStatistics()); - } - - private static NodeValueStatistics compileNodeValueStatistics(NodeReport nodeReport) { - return compileNodeValueStatistics(nodeReport.getNodeValue(), nodeReport.getOccurrence(), - nodeReport.getAttributeStatistics()); - } - - private static NodeValueStatistics compileNodeValueStatistics(String nodeValue, - long 
occurrence, - Collection attributes) { - final List attributeStatistics = attributes.stream() - .map(ProxiesHelper::compileAttributeStatistics) - .sorted(Comparator.comparing(AttributeStatistics::getxPath) - .thenComparing(AttributeStatistics::getValue)) - .toList(); - final NodeValueStatistics nodeValueStatistics = new NodeValueStatistics(); - nodeValueStatistics.setValue(nodeValue); - nodeValueStatistics.setOccurrences(occurrence); - nodeValueStatistics.setAttributeStatistics(attributeStatistics); - return nodeValueStatistics; - } - - private static AttributeStatistics compileAttributeStatistics( - eu.europeana.cloud.common.model.dps.AttributeStatistics input) { - final AttributeStatistics attributeStatistics = new AttributeStatistics(); - attributeStatistics.setxPath(input.getName()); - attributeStatistics.setValue(input.getValue()); - attributeStatistics.setOccurrences(input.getOccurrence()); - return attributeStatistics; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesService.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesService.java deleted file mode 100644 index a65f0b0154..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ProxiesService.java +++ /dev/null @@ -1,594 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.client.uis.rest.CloudException; -import eu.europeana.cloud.client.uis.rest.UISClient; -import eu.europeana.cloud.common.model.File; -import eu.europeana.cloud.common.model.Representation; -import eu.europeana.cloud.common.model.Revision; -import eu.europeana.cloud.common.model.dps.NodeReport; -import eu.europeana.cloud.common.model.dps.StatisticsReport; -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import 
eu.europeana.cloud.common.response.CloudTagsResponse; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.mcs.driver.FileServiceClient; -import eu.europeana.cloud.mcs.driver.RecordServiceClient; -import eu.europeana.cloud.service.dps.exception.DpsException; -import eu.europeana.cloud.service.mcs.exception.MCSException; -import eu.europeana.cloud.service.uis.exception.RecordDoesNotExistException; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.RecordIdUtils; -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.rest.ListOfIds; -import eu.europeana.metis.core.rest.PaginatedRecordsResponse; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.RecordsResponse; -import eu.europeana.metis.core.rest.stats.NodePathStatistics; -import eu.europeana.metis.core.rest.stats.RecordStatistics; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.GenericMetisException; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Optional; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import 
org.apache.commons.lang3.tuple.Pair; - -/** - * Proxies Service which encapsulates functionality that has to be proxied to an external resource. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-26 - */ -public class ProxiesService { - - protected final DateFormat pluginDateFormatForEcloud = new SimpleDateFormat( - "yyyy-MM-dd'T'HH:mm:ss.SSSXXX", Locale.US); - - private final WorkflowExecutionDao workflowExecutionDao; - private final DataSetServiceClient ecloudDataSetServiceClient; - private final RecordServiceClient recordServiceClient; - private final FileServiceClient fileServiceClient; - private final DpsClient dpsClient; - private final UISClient uisClient; - private final String ecloudProvider; - private final Authorizer authorizer; - private final ProxiesHelper proxiesHelper; - private final DataEvolutionUtils dataEvolutionUtils; - - /** - * Constructor with required parameters. - * - * @param workflowExecutionDao {@link WorkflowExecutionDao} - * @param ecloudDataSetServiceClient {@link DataSetServiceClient} - * @param recordServiceClient {@link RecordServiceClient} - * @param fileServiceClient {@link FileServiceClient} - * @param dpsClient {@link DpsClient} - * @param ecloudProvider the ecloud provider string - * @param authorizer the authorizer - */ - public ProxiesService(WorkflowExecutionDao workflowExecutionDao, - DataSetServiceClient ecloudDataSetServiceClient, RecordServiceClient recordServiceClient, - FileServiceClient fileServiceClient, DpsClient dpsClient, UISClient uisClient, String ecloudProvider, - Authorizer authorizer) { - this(workflowExecutionDao, ecloudDataSetServiceClient, recordServiceClient, fileServiceClient, - dpsClient, uisClient, ecloudProvider, authorizer, new ProxiesHelper()); - } - - ProxiesService(WorkflowExecutionDao workflowExecutionDao, - DataSetServiceClient ecloudDataSetServiceClient, RecordServiceClient recordServiceClient, - FileServiceClient fileServiceClient, DpsClient dpsClient, UISClient 
uisClient, String ecloudProvider, - Authorizer authorizer, ProxiesHelper proxiesHelper) { - this.workflowExecutionDao = workflowExecutionDao; - this.ecloudDataSetServiceClient = ecloudDataSetServiceClient; - this.recordServiceClient = recordServiceClient; - this.fileServiceClient = fileServiceClient; - this.dpsClient = dpsClient; - this.uisClient = uisClient; - this.ecloudProvider = ecloudProvider; - this.authorizer = authorizer; - this.proxiesHelper = proxiesHelper; - this.dataEvolutionUtils = new DataEvolutionUtils(this.workflowExecutionDao); - } - - /** - * Get logs from a specific topology task paged. - * - * @param metisUserView the user wishing to perform this operation - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param from integer to start getting logs from - * @param to integer until where logs should be received - * @return the list of logs - * @throws GenericMetisException can be one of: - *
    - *
  • {@link DpsException} if an error occurred while retrieving the logs from the external - * resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
- */ - public List getExternalTaskLogs(MetisUserView metisUserView, String topologyName, - long externalTaskId, int from, int to) throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, - getDatasetIdFromExternalTaskId(externalTaskId)); - List detailedTaskReportBetweenChunks; - try { - detailedTaskReportBetweenChunks = - dpsClient.getDetailedTaskReportBetweenChunks(topologyName, externalTaskId, from, to); - } catch (DpsException e) { - throw new ExternalTaskException(String.format( - "Getting the task detailed logs failed. topologyName: %s, externalTaskId: %s, from: %s, to: %s", - topologyName, externalTaskId, from, to), e); - } - for (SubTaskInfo subTaskInfo : detailedTaskReportBetweenChunks) { // Hide sensitive information - subTaskInfo.setAdditionalInformations(null); - } - return detailedTaskReportBetweenChunks; - } - - /** - * Check if final report is available. - * - * @param metisUserView the user wishing to perform this operation - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @return true if final report available, false if not or ecloud response {@link jakarta.ws.rs.core.Response.Status)} is not OK, - * based on {@link DpsClient#checkIfErrorReportExists} - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
  • {@link ExternalTaskException} containing {@link DpsException} if an error occurred while checking if the error report exists
  • - *
- */ - public boolean existsExternalTaskReport(MetisUserView metisUserView, String topologyName, - long externalTaskId) - throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, - getDatasetIdFromExternalTaskId(externalTaskId)); - try { - return dpsClient.checkIfErrorReportExists(topologyName, externalTaskId); - } catch (DpsException e) { - throw new ExternalTaskException(String.format( - "Checking if the error report exists failed. topologyName: %s, externalTaskId: %s", - topologyName, externalTaskId), e); - } - } - - /** - * Get the final report that includes all the errors grouped. The number of ids per error can be specified through the - * parameters. - * - * @param metisUserView the user wishing to perform this operation - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param idsPerError the number of ids that should be displayed per error group - * @return the list of errors grouped - * @throws GenericMetisException can be one of: - *
    - *
  • {@link DpsException} if an error occurred while retrieving the report from the external - * resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
- */ - public TaskErrorsInfo getExternalTaskReport(MetisUserView metisUserView, String topologyName, - long externalTaskId, int idsPerError) throws GenericMetisException { - authorizer.authorizeReadExistingDatasetById(metisUserView, - getDatasetIdFromExternalTaskId(externalTaskId)); - TaskErrorsInfo taskErrorsInfo; - try { - taskErrorsInfo = - dpsClient.getTaskErrorsReport(topologyName, externalTaskId, null, idsPerError); - } catch (DpsException e) { - throw new ExternalTaskException(String.format( - "Getting the task error report failed. topologyName: %s, externalTaskId: %s, idsPerError: %s", - topologyName, externalTaskId, idsPerError), e); - } - return taskErrorsInfo; - } - - /** - * Get the statistics of an external task. - * - * @param metisUserView the user wishing to perform this operation - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @return the record statistics for the given task. - * @throws GenericMetisException can be one of: - *
    - *
  • {@link DpsException} if an error occurred while retrieving the statistics from the - * external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
- */ - public RecordStatistics getExternalTaskStatistics(MetisUserView metisUserView, String topologyName, - long externalTaskId) throws GenericMetisException { - - // Authorize - authorizer.authorizeReadExistingDatasetById(metisUserView, - getDatasetIdFromExternalTaskId(externalTaskId)); - - // Obtain the report from eCloud. - final StatisticsReport report; - try { - report = dpsClient.getTaskStatisticsReport(topologyName, externalTaskId); - } catch (DpsException e) { - throw new ExternalTaskException(String.format( - "Getting the task statistics failed. topologyName: %s, externalTaskId: %s", - topologyName, externalTaskId), e); - } - - // Convert them and done. - return proxiesHelper.compileRecordStatistics(report); - } - - /** - * Get additional statistics on a node. This method can be used to elaborate on one of the items returned by {@link - * #getExternalTaskStatistics(MetisUserView, String, long)}. - * - * @param metisUserView the user wishing to perform this operation - * @param topologyName the topology name of the task - * @param externalTaskId the task identifier - * @param nodePath the path of the node for which this request is made - * @return the node statistics for the given path in the given task. - * @throws GenericMetisException can be one of: - *
    - *
  • {@link DpsException} if an error occurred while retrieving the statistics from the - * external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided external task identifier
  • - *
- */ - public NodePathStatistics getAdditionalNodeStatistics(MetisUserView metisUserView, String topologyName, - long externalTaskId, String nodePath) throws GenericMetisException { - - // Authorize - authorizer.authorizeReadExistingDatasetById(metisUserView, - getDatasetIdFromExternalTaskId(externalTaskId)); - - // Obtain the reports from eCloud. - final List nodeReports; - try { - nodeReports = dpsClient.getElementReport(topologyName, externalTaskId, nodePath); - } catch (DpsException e) { - throw new ExternalTaskException(String.format( - "Getting the additional node statistics failed. topologyName: %s, externalTaskId: %s", - topologyName, externalTaskId), e); - } - - // Convert them and done. - return proxiesHelper.compileNodePathStatistics(nodePath, nodeReports); - } - - private String getDatasetIdFromExternalTaskId(long externalTaskId) - throws NoWorkflowExecutionFoundException { - final WorkflowExecution workflowExecution = - this.workflowExecutionDao.getByExternalTaskId(externalTaskId); - if (workflowExecution == null) { - throw new NoWorkflowExecutionFoundException(String - .format("No workflow execution found for externalTaskId: %d, in METIS", externalTaskId)); - } - return workflowExecution.getDatasetId(); - } - - /** - * Get a list with record contents from the external resource based on a workflow execution and {@link PluginType}. - * - * @param metisUserView the user wishing to perform this operation - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param nextPage the string representation of the next page which is provided from the response and can be used to get the - * next page of results. - * TODO: The nextPage parameter is currently ignored and we should decide if we would support it again in the future. 
- * @param numberOfRecords the number of records per response - * @return the list of records from the external resource - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no - * workflow execution exists for the provided identifier
  • - *
- */ - public PaginatedRecordsResponse getListOfFileContentsFromPluginExecution( - MetisUserView metisUserView, - String workflowExecutionId, ExecutablePluginType pluginType, String nextPage, - int numberOfRecords) throws GenericMetisException { - - // Get the right workflow execution and plugin type. - final Pair executionAndPlugin = getExecutionAndPlugin( - metisUserView, workflowExecutionId, pluginType); - if (executionAndPlugin == null) { - return new PaginatedRecordsResponse(Collections.emptyList(), null); - } - - // Get the list of records. - final String datasetId = executionAndPlugin.getLeft().getEcloudDatasetId(); - final String representationName = MetisPlugin.getRepresentationName(); - final String revisionName = executionAndPlugin.getRight().getPluginType().name(); - final String revisionTimestamp = pluginDateFormatForEcloud - .format(executionAndPlugin.getRight().getStartedDate()); - final List revisionsWithDeletedFlagSetToFalse; - try { - revisionsWithDeletedFlagSetToFalse = ecloudDataSetServiceClient.getRevisionsWithDeletedFlagSetToFalse( - ecloudProvider, datasetId, representationName, revisionName, ecloudProvider, revisionTimestamp, numberOfRecords); - } catch (MCSException e) { - throw new ExternalTaskException(String.format( - "Getting record list with file content failed. workflowExecutionId: %s, pluginType: %s", - workflowExecutionId, pluginType), e); - } - - // Get the records themselves. - final List records = new ArrayList<>(revisionsWithDeletedFlagSetToFalse.size()); - for (CloudTagsResponse cloudTagsResponse : revisionsWithDeletedFlagSetToFalse) { - final Record record = getRecord(executionAndPlugin.getRight(), cloudTagsResponse.getCloudId()); - if (record == null) { - throw new IllegalStateException("This can't happen: eCloud just told us the record exists"); - } - records.add(record); - } - - // Compile the result. 
- return new PaginatedRecordsResponse(records, null); - } - - /** - * Get a list with record contents from the external resource based on an workflow execution and {@link PluginType}. - * - * @param metisUserView the user wishing to perform this operation - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param ecloudIds the list of ecloud IDs of the records we wish to obtain - * @return the list of records from the external resource - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - public RecordsResponse getListOfFileContentsFromPluginExecution(MetisUserView metisUserView, - String workflowExecutionId, ExecutablePluginType pluginType, ListOfIds ecloudIds) - throws GenericMetisException { - - // Get the right workflow execution and plugin type. - final Pair executionAndPlugin = getExecutionAndPlugin( - metisUserView, workflowExecutionId, pluginType); - if (executionAndPlugin == null) { - throw new NoWorkflowExecutionFoundException(String - .format("No executable plugin of type %s found for workflowExecution with id: %s", - pluginType.name(), workflowExecutionId)); - } - - // Get the records. - final List records = new ArrayList<>(ecloudIds.getIds().size()); - for (String cloudId : ecloudIds.getIds()) { - Optional.ofNullable(getRecord(executionAndPlugin.getRight(), cloudId)).ifPresent(records::add); - } - - // Done. - return new RecordsResponse(records); - } - - /** - * Get a list with record contents from the external resource based on a workflow execution and the predecessor - * of the given {@link PluginType}. - * - * @param metisUserView the user wishing to perform this operation - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param ecloudIds the list of ecloud IDs of the records we wish to obtain - * @return the list of records from the external resource - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while retrieving the records from the external - * resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - public RecordsResponse getListOfFileContentsFromPredecessorPluginExecution(MetisUserView metisUserView, - String workflowExecutionId, ExecutablePluginType pluginType, ListOfIds ecloudIds) - throws GenericMetisException { - - // Get the right workflow execution and plugin type. - final Pair executionAndPlugin = getExecutionAndPlugin( - metisUserView, workflowExecutionId, pluginType); - if (executionAndPlugin == null) { - throw new NoWorkflowExecutionFoundException(String - .format("No executable plugin of type %s found for workflowExecution with id: %s", - pluginType.name(), workflowExecutionId)); - } - - Pair predecessorPlugin = - dataEvolutionUtils.getPreviousExecutionAndPlugin(executionAndPlugin.getRight(), executionAndPlugin.getLeft().getDatasetId()); - if(predecessorPlugin == null){ - throw new NoWorkflowExecutionFoundException(String - .format("No predecessor for executable plugin of type %s found for workflowExecution with id: %s", - pluginType.name(), workflowExecutionId)); - } - - ExecutablePlugin predecessorExecutablePlugin = (ExecutablePlugin) predecessorPlugin.getLeft(); - - // Get the records. - final List records = new ArrayList<>(ecloudIds.getIds().size()); - for (String cloudId : ecloudIds.getIds()) { - Optional.ofNullable(getRecord(predecessorExecutablePlugin, cloudId)).ifPresent(records::add); - } - - // Done. - return new RecordsResponse(records); - } - - /** - * Get a record from the external resource based on o searchId, workflow execution and {@link PluginType}. - * - * @param metisUserView the user wishing to perform this operation - * @param workflowExecutionId the execution identifier of the workflow - * @param pluginType the {@link ExecutablePluginType} that is to be located inside the workflow - * @param idToSearch the ID we are searching for and for which we want to find a record - * @return the record from the external resource - * @throws GenericMetisException can be one of: - *
    - *
  • {@link eu.europeana.metis.exception.ExternalTaskException} if an error occurred while - * retrieving the records from the external resource
  • - *
  • {@link eu.europeana.metis.exception.UserUnauthorizedException} if the user is not - * authorized to perform this task
  • - *
  • {@link eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException} if no workflow - * execution exists for the provided identifier
  • - *
- */ - public Record searchRecordByIdFromPluginExecution(MetisUserView metisUserView, - String workflowExecutionId, ExecutablePluginType pluginType, String idToSearch) - throws GenericMetisException { - - // Get the right workflow execution and plugin type. - final Pair executionAndPlugin = getExecutionAndPlugin( - metisUserView, workflowExecutionId, pluginType); - if (executionAndPlugin == null) { - throw new NoWorkflowExecutionFoundException(String - .format("No executable plugin of type %s found for workflowExecution with id: %s", - pluginType.name(), workflowExecutionId)); - } - - // Check whether the searched ID is known as a Europeana ID or an ecloudId. - final String datasetId = executionAndPlugin.getLeft().getDatasetId(); - String ecloudId = null; - try { - final String normalizedRecordId = RecordIdUtils.checkAndNormalizeRecordId(datasetId, idToSearch) - .map(id -> RecordIdUtils.composeFullRecordId(datasetId, id)).orElse(null); - if (normalizedRecordId != null) { - ecloudId = uisClient.getCloudId(ecloudProvider, normalizedRecordId).getId(); - } - } catch (BadContentException e) { - // Normalization failed. Check whether the ID is already an eCloud ID. - ecloudId = verifyExistenceOfEcloudId(idToSearch); - } catch (CloudException e) { - if (e.getCause() instanceof RecordDoesNotExistException) { - // The record ID does not exist. Check whether the ID is already an eCloud ID. - ecloudId = verifyExistenceOfEcloudId(idToSearch); - } else { - // Some other connectivity issue. - throw new ExternalTaskException( - String.format("Failed to lookup cloudId for idToSearch: %s", idToSearch), e); - } - } - - // Try to retrieve the record. Note: we need to know if the eCloud ID exists at this point - // because getRecord() cannot detect non-existing eCloud IDs. - return ecloudId == null ? 
null : getRecord(executionAndPlugin.getRight(), ecloudId); - } - - private String verifyExistenceOfEcloudId(String potentialEcloudId) throws ExternalTaskException { - try { - return uisClient.getRecordId(potentialEcloudId).getResults().isEmpty() ? null - : potentialEcloudId; - } catch (CloudException e) { - // TODO currently we can't distinguish between a connection issue and a non-existing eCloud ID. - // The client should be changed to allow for this. We assume here that there is not a connection - // issue because, where this method is called, we just did a successful call to the UIS service. - return null; - } - } - - Pair getExecutionAndPlugin(MetisUserView metisUserView, - String workflowExecutionId, ExecutablePluginType pluginType) throws GenericMetisException { - - // Get the workflow execution - check that the user has rights to access this. - final WorkflowExecution workflowExecution = workflowExecutionDao.getById(workflowExecutionId); - if (workflowExecution == null) { - throw new NoWorkflowExecutionFoundException( - String.format("No workflow execution found for workflowExecutionId: %s, in METIS", - workflowExecutionId)); - } - authorizer.authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - - // Get the plugin for which to get the records and return. - final MetisPlugin plugin = workflowExecution - .getMetisPluginWithType(pluginType.toPluginType()).orElse(null); - if (plugin instanceof ExecutablePlugin executablePlugin) { - return new ImmutablePair<>(workflowExecution, executablePlugin); - } - return null; - } - - Record getRecord(ExecutablePlugin plugin, String ecloudId) throws ExternalTaskException { - - // Get the representation(s) for the given combination of plugin and record ID. 
- final List representations; - try { - final Revision revision = new Revision(plugin.getPluginType().name(), ecloudProvider, plugin.getStartedDate()); - representations = recordServiceClient.getRepresentationsByRevision(ecloudId, MetisPlugin.getRepresentationName(), revision); - } catch (MCSException e) { - throw new ExternalTaskException(String.format( - "Getting record list with file content failed. externalTaskId: %s, pluginType: %s, ecloudId: %s", - plugin.getExternalTaskId(), plugin.getPluginType(), ecloudId), e); - } - - // If no representation is found, return null. - if (representations == null || representations.isEmpty()) { - return null; - } - final Representation representation = representations.get(0); - - // Perform checks on the file lists. - if (representation.getFiles() == null || representation.getFiles().isEmpty()) { - throw new ExternalTaskException(String.format( - "Expecting one file in the representation, but received none. externalTaskId: %s, pluginType: %s, ecloudId: %s", - plugin.getExternalTaskId(), plugin.getPluginType(), ecloudId)); - } - final File file = representation.getFiles().get(0); - - // Obtain the file contents belonging to this representation version. - try { - final InputStream inputStream = fileServiceClient.getFile(file.getContentUri().toString()); - return new Record(ecloudId, IOUtils.toString(inputStream, StandardCharsets.UTF_8.name())); - } catch (MCSException e) { - throw new ExternalTaskException(String.format( - "Getting record list with file content failed. 
externalTaskId: %s, pluginType: %s", - plugin.getExternalTaskId(), plugin.getPluginType()), e); - } catch (IOException e) { - throw new ExternalTaskException("Problem while reading the contents of the file.", e); - } - } - - String getEcloudProvider() { - return ecloudProvider; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/RedirectionInferrer.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/RedirectionInferrer.java deleted file mode 100644 index 3aa38e2d5f..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/RedirectionInferrer.java +++ /dev/null @@ -1,109 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import java.util.EnumSet; -import java.util.List; -import java.util.function.BooleanSupplier; -import org.springframework.util.CollectionUtils; - -public class RedirectionInferrer { - - private final WorkflowExecutionDao workflowExecutionDao; - private final DataEvolutionUtils dataEvolutionUtils; - - public RedirectionInferrer(WorkflowExecutionDao workflowExecutionDao, DataEvolutionUtils dataEvolutionUtils) { - this.workflowExecutionDao = workflowExecutionDao; - this.dataEvolutionUtils = dataEvolutionUtils; - } - - /** - * Determines whether to apply redirection as part of the given plugin. We apply the following heuristics to determining this, - * based on the information available to us, and erring on the side of caution in the sense that it is better to perform it once - * too many than once too few: - *
    - *
  1. - * If this is the first plugin of its kind for this dataset, we check for redirects if and only if - * the dataset properties specify any datasets to redirect from. - *
  2. - *
  3. - * If this is not the first plugin of its kind: - *
      - *
    1. - * If a harvesting occurred after the last plugin of the same kind we assume that the records may - * have changed and/or moved and we perform a redirection. - *
    2. - *
    3. - * If the dataset properties (which includes the list of datasets to redirect from) have changed - * since the last plugin of the same kind we assume that the list of datasets to redirect from may - * have changed and we perform a redirection if and only if the dataset properties specify any - * datasets to redirect from. - *
    4. - *
    - *
  4. - *
- * If none of these conditions apply, we do not check for redirects. - * - * @param dataset The dataset. - * @param workflowPredecessor The plugin on which the new workflow is based as a predecessor. Can be null. - * @param executablePluginType The type of the plugin as part of which we may wish to perform redirection. - * @param typesInWorkflowBeforeThisPlugin The types of the plugins that come before this plugin in the new workflow. - * @return Whether to apply redirection as part of this plugin. - */ - public boolean shouldRedirectsBePerformed(Dataset dataset, - PluginWithExecutionId workflowPredecessor, - ExecutablePluginType executablePluginType, - List typesInWorkflowBeforeThisPlugin) { - - // Get some history from the database: find the latest successful plugin of the same type. - // Note: we don't limit to valid data: perhaps the data is deprecated after reindexing. - final PluginWithExecutionId latestSuccessfulPlugin = workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(dataset.getDatasetId(), - EnumSet.of(executablePluginType), false); - - // Check if we can find the answer in the workflow itself. Iterate backwards and see what we find. - for (int i = typesInWorkflowBeforeThisPlugin.size() - 1; i >= 0; i--) { - final ExecutablePluginType type = typesInWorkflowBeforeThisPlugin.get(i); - if (DataEvolutionUtils.getHarvestPluginGroup().contains(type)) { - // If we find a harvest (occurring after any plugin of this type), - // we know we need to perform redirects only if there is a non null latest successful plugin or there are datasets to redirect from. - return latestSuccessfulPlugin != null || !CollectionUtils - .isEmpty(dataset.getDatasetIdsToRedirectFrom()); - } - if (type == executablePluginType) { - // If we find another plugin of the same type (after any harvest) we know we don't need to perform redirect. - return false; - } - } - - // If we have a previous execution of this plugin, we see if things have changed since then. 
- final boolean performRedirect; - if (latestSuccessfulPlugin == null) { - // If it's the first plugin execution, just check if dataset ids to redirect from are present. - performRedirect = !CollectionUtils.isEmpty(dataset.getDatasetIdsToRedirectFrom()); - } else { - // Check if since the latest plugin's execution, the dataset information is updated and (now) - // contains dataset ids to redirect from. - final boolean datasetUpdatedSinceLatestPlugin = dataset.getUpdatedDate() != null && - dataset.getUpdatedDate().compareTo(latestSuccessfulPlugin.getPlugin().getFinishedDate()) - >= 0 && !CollectionUtils.isEmpty(dataset.getDatasetIdsToRedirectFrom()); - - // Check if the latest plugin execution is based on a different harvest as this one will be. - // If this plugin's harvest cannot be determined, assume it is not the same (this shouldn't - // happen as we checked the workflow already). This is a lambda: we wish to evaluate on demand. - final BooleanSupplier rootDiffersForLatestPlugin = () -> workflowPredecessor == null - || !dataEvolutionUtils.getRootAncestor(latestSuccessfulPlugin) - .equals(dataEvolutionUtils.getRootAncestor(workflowPredecessor)); - - // In either of these situations, we perform a redirect. 
- performRedirect = datasetUpdatedSinceLatestPlugin || rootDiffersForLatestPlugin.getAsBoolean(); - } - - // Done - return performRedirect; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ScheduleWorkflowService.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ScheduleWorkflowService.java deleted file mode 100644 index 9e790afa46..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/ScheduleWorkflowService.java +++ /dev/null @@ -1,192 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoScheduledWorkflowFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.ScheduledWorkflowAlreadyExistsException; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import java.time.LocalDateTime; -import java.util.List; -import org.apache.commons.lang3.StringUtils; -import org.bson.types.ObjectId; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * Service class that controls the communication between the different DAOs of the system for - * controlling scheduled workflows. 
- * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-05 - */ -@Service -public class ScheduleWorkflowService { - - private final ScheduledWorkflowDao scheduledWorkflowDao; - private final WorkflowDao workflowDao; - private final DatasetDao datasetDao; - private final Authorizer authorizer; - - /** - * Constructor with required parameters. - * - * @param scheduledWorkflowDao the dao for accessing schedules - * @param workflowDao the dao for workflows - * @param datasetDao the dao for datasets - * @param authorizer the class used for authorizing requests - */ - @Autowired - public ScheduleWorkflowService(ScheduledWorkflowDao scheduledWorkflowDao, WorkflowDao workflowDao, - DatasetDao datasetDao, Authorizer authorizer) { - this.scheduledWorkflowDao = scheduledWorkflowDao; - this.workflowDao = workflowDao; - this.datasetDao = datasetDao; - this.authorizer = authorizer; - } - - public int getScheduledWorkflowsPerRequest() { - return scheduledWorkflowDao.getScheduledWorkflowPerRequest(); - } - - /** - * Get a scheduled workflow based on datasets identifier. - * - * @param metisUserView the metis user trying to access the scheduled workflow - * @param datasetId the dataset identifier of which a scheduled workflow is to be retrieved - * @return the scheduled workflow - * @throws UserUnauthorizedException if user is unauthorized to access the scheduled workflow - * @throws NoDatasetFoundException if dataset identifier does not exist - */ - public ScheduledWorkflow getScheduledWorkflowByDatasetId(MetisUserView metisUserView, String datasetId) - throws UserUnauthorizedException, NoDatasetFoundException { - authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId); - return scheduledWorkflowDao.getScheduledWorkflowByDatasetId(datasetId); - } - - /** - * Schedules a provided workflow. 
- * - * @param metisUserView the user that tries to submit a scheduled workflow - * @param scheduledWorkflow the scheduled workflow information - * @throws GenericMetisException which can be one of: - *
    - *
  • {@link NoDatasetFoundException} if the dataset does not exist
  • - *
  • {@link UserUnauthorizedException} if the user is unauthorized
  • - *
  • {@link BadContentException} if some content send was not acceptable
  • - *
  • {@link NoWorkflowFoundException} if the workflow for a dataset was not found
  • - *
  • {@link ScheduledWorkflowAlreadyExistsException} if a scheduled workflow already exists
  • - *
- */ - public void scheduleWorkflow(MetisUserView metisUserView, ScheduledWorkflow scheduledWorkflow) - throws GenericMetisException { - authorizer.authorizeWriteExistingDatasetById(metisUserView, scheduledWorkflow.getDatasetId()); - checkRestrictionsOnScheduleWorkflow(scheduledWorkflow); - scheduledWorkflowDao.create(scheduledWorkflow); - } - - // This method does not require authorization. It is called from a scheduled task. - public List getAllScheduledWorkflowsWithoutAuthorization( - ScheduleFrequence scheduleFrequence, int nextPage) { - return scheduledWorkflowDao.getAllScheduledWorkflows(scheduleFrequence, nextPage); - } - - public List getAllScheduledWorkflows(MetisUserView metisUserView, - ScheduleFrequence scheduleFrequence, int nextPage) throws UserUnauthorizedException { - authorizer.authorizeReadAllDatasets(metisUserView); - return getAllScheduledWorkflowsWithoutAuthorization(scheduleFrequence, nextPage); - } - - // This method does not require authorization. It is called from a scheduled task. 
- public List getAllScheduledWorkflowsByDateRangeONCE( - LocalDateTime lowerBound, - LocalDateTime upperBound, int nextPage) { - return scheduledWorkflowDao - .getAllScheduledWorkflowsByDateRangeONCE(lowerBound, upperBound, nextPage); - } - - private void checkScheduledWorkflowExistenceForDatasetId(String datasetId) - throws ScheduledWorkflowAlreadyExistsException { - String id = scheduledWorkflowDao.existsForDatasetId(datasetId); - if (id != null) { - throw new ScheduledWorkflowAlreadyExistsException(String.format( - "ScheduledWorkflow for datasetId: %s with id %s, already exists", - datasetId, id)); - } - } - - public void updateScheduledWorkflow(MetisUserView metisUserView, ScheduledWorkflow scheduledWorkflow) - throws GenericMetisException { - authorizer.authorizeWriteExistingDatasetById(metisUserView, scheduledWorkflow.getDatasetId()); - String storedId = checkRestrictionsOnScheduledWorkflowUpdate(scheduledWorkflow); - scheduledWorkflow.setId(new ObjectId(storedId)); - scheduledWorkflowDao.update(scheduledWorkflow); - } - - private void checkRestrictionsOnScheduleWorkflow(ScheduledWorkflow scheduledWorkflow) - throws - NoWorkflowFoundException, NoDatasetFoundException, ScheduledWorkflowAlreadyExistsException, BadContentException { - checkDatasetExistence(scheduledWorkflow.getDatasetId()); - checkWorkflowExistence(scheduledWorkflow.getDatasetId()); - checkScheduledWorkflowExistenceForDatasetId(scheduledWorkflow.getDatasetId()); - if (scheduledWorkflow.getPointerDate() == null) { - throw new BadContentException("PointerDate cannot be null"); - } - if (scheduledWorkflow.getScheduleFrequence() == null - || scheduledWorkflow.getScheduleFrequence() == ScheduleFrequence.NULL) { - throw new BadContentException("NULL or null is not a valid scheduleFrequence"); - } - } - - private String checkRestrictionsOnScheduledWorkflowUpdate( - ScheduledWorkflow scheduledWorkflow) - throws NoScheduledWorkflowFoundException, BadContentException, NoWorkflowFoundException { - 
checkWorkflowExistence(scheduledWorkflow.getDatasetId()); - String storedId = scheduledWorkflowDao.existsForDatasetId(scheduledWorkflow.getDatasetId()); - if (StringUtils.isEmpty(storedId)) { - throw new NoScheduledWorkflowFoundException(String.format( - "Workflow with datasetId: %s, not found", scheduledWorkflow.getDatasetId())); - } - if (scheduledWorkflow.getPointerDate() == null) { - throw new BadContentException("PointerDate cannot be null"); - } - if (scheduledWorkflow.getScheduleFrequence() == null - || scheduledWorkflow.getScheduleFrequence() == ScheduleFrequence.NULL) { - throw new BadContentException("NULL or null is not a valid scheduleFrequence"); - } - return storedId; - } - - public void deleteScheduledWorkflow(MetisUserView metisUserView, String datasetId) - throws UserUnauthorizedException, NoDatasetFoundException { - authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId); - scheduledWorkflowDao.deleteScheduledWorkflow(datasetId); - } - - private Dataset checkDatasetExistence(String datasetId) throws NoDatasetFoundException { - Dataset dataset = datasetDao.getDatasetByDatasetId(datasetId); - if (dataset == null) { - throw new NoDatasetFoundException( - String.format("No dataset found with datasetId: %s, in METIS", datasetId)); - } - return dataset; - } - - private Workflow checkWorkflowExistence(String datasetId) throws NoWorkflowFoundException { - Workflow workflow = workflowDao.getWorkflow(datasetId); - if (workflow == null) { - throw new NoWorkflowFoundException( - String.format("No workflow found with datasetId: %s, in METIS", datasetId)); - } - return workflow; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/WorkflowExecutionFactory.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/WorkflowExecutionFactory.java deleted file mode 100644 index 0a51d84f7c..0000000000 --- 
a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/service/WorkflowExecutionFactory.java +++ /dev/null @@ -1,210 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.metis.core.common.TransformationParameters; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.workflow.ValidationProperties; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import eu.europeana.metis.exception.BadContentException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import org.apache.commons.lang3.StringUtils; - 
-/** - * Class that contains various functionality for "helping" the {@link OrchestratorService}. - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-10-11 - */ -public class WorkflowExecutionFactory { - - private final DatasetXsltDao datasetXsltDao; - private final DepublishRecordIdDao depublishRecordIdDao; - private final RedirectionInferrer redirectionInferrer; - - private ValidationProperties validationExternalProperties; // Use getter and setter! - private ValidationProperties validationInternalProperties; // Use getter and setter! - private int defaultSamplingSizeForLinkChecking; // Use getter and setter for this field! - - /** - * Constructor with parameters required to support the {@link OrchestratorService} - * - * @param datasetXsltDao the Dao instance to access the dataset xslts - * @param depublishRecordIdDao The Dao instance to access depublish records. - * @param redirectionInferrer the service instance to access redirection logic - */ - public WorkflowExecutionFactory(DatasetXsltDao datasetXsltDao, - DepublishRecordIdDao depublishRecordIdDao, RedirectionInferrer redirectionInferrer) { - this.datasetXsltDao = datasetXsltDao; - this.depublishRecordIdDao = depublishRecordIdDao; - this.redirectionInferrer = redirectionInferrer; - } - - // Expect the dataset to be synced with eCloud. - // Does not save the workflow execution. 
- WorkflowExecution createWorkflowExecution(Workflow workflow, Dataset dataset, - PluginWithExecutionId predecessor, int priority) - throws BadContentException { - - // Create the plugins - final List> workflowPlugins = new ArrayList<>(); - final List typesInWorkflow = new ArrayList<>(); - for (AbstractExecutablePluginMetadata pluginMetadata : workflow.getMetisPluginsMetadata()) { - if (pluginMetadata.isEnabled()) { - workflowPlugins.add( - createWorkflowExecutionPlugin(dataset, predecessor, pluginMetadata, typesInWorkflow)); - typesInWorkflow.add(pluginMetadata.getExecutablePluginType()); - } - } - - // Set the predecessor - if (predecessor != null) { - workflowPlugins.get(0).getPluginMetadata() - .setPreviousRevisionInformation(predecessor.getPlugin()); - } - - // Done: create workflow with all the information. - return new WorkflowExecution(dataset, workflowPlugins, priority); - } - - private AbstractExecutablePlugin createWorkflowExecutionPlugin(Dataset dataset, - PluginWithExecutionId workflowPredecessor, - AbstractExecutablePluginMetadata pluginMetadata, - List typesInWorkflowBeforeThisPlugin) throws BadContentException { - - // Add some extra configuration to the plugin metadata depending on the type. 
- if (pluginMetadata instanceof TransformationPluginMetadata transformationPluginMetadata) { - setupXsltIdForPluginMetadata(dataset, transformationPluginMetadata); - } else if (pluginMetadata instanceof ValidationExternalPluginMetadata validationExternalPluginMetadata) { - this.setupValidationExternalForPluginMetadata(validationExternalPluginMetadata, getValidationExternalProperties()); - } else if (pluginMetadata instanceof ValidationInternalPluginMetadata validationInternalPluginMetadata) { - this.setupValidationInternalForPluginMetadata(validationInternalPluginMetadata, getValidationInternalProperties()); - } else if (pluginMetadata instanceof IndexToPreviewPluginMetadata indexToPreviewPluginMetadata) { - indexToPreviewPluginMetadata.setDatasetIdsToRedirectFrom(dataset.getDatasetIdsToRedirectFrom()); - boolean performRedirects = redirectionInferrer.shouldRedirectsBePerformed(dataset, workflowPredecessor, - ExecutablePluginType.PREVIEW, typesInWorkflowBeforeThisPlugin); - indexToPreviewPluginMetadata.setPerformRedirects(performRedirects); - } else if (pluginMetadata instanceof IndexToPublishPluginMetadata indexToPublishPluginMetadata) { - indexToPublishPluginMetadata.setDatasetIdsToRedirectFrom(dataset.getDatasetIdsToRedirectFrom()); - boolean performRedirects = redirectionInferrer.shouldRedirectsBePerformed(dataset, workflowPredecessor, - ExecutablePluginType.PUBLISH, typesInWorkflowBeforeThisPlugin); - indexToPublishPluginMetadata.setPerformRedirects(performRedirects); - } else if (pluginMetadata instanceof DepublishPluginMetadata depublishPluginMetadata) { - setupDepublishPluginMetadata(dataset, depublishPluginMetadata); - } else if (pluginMetadata instanceof LinkCheckingPluginMetadata linkCheckingPluginMetadata) { - linkCheckingPluginMetadata.setSampleSize(getDefaultSamplingSizeForLinkChecking()); - } - - // Create the plugin - return ExecutablePluginFactory.createPlugin(pluginMetadata); - } - - private void 
setupValidationExternalForPluginMetadata(ValidationExternalPluginMetadata metadata, - ValidationProperties validationProperties) { - metadata.setUrlOfSchemasZip(validationProperties.getUrlOfSchemasZip()); - metadata.setSchemaRootPath(validationProperties.getSchemaRootPath()); - metadata.setSchematronRootPath(validationProperties.getSchematronRootPath()); - } - - private void setupValidationInternalForPluginMetadata(ValidationInternalPluginMetadata metadata, - ValidationProperties validationProperties) { - metadata.setUrlOfSchemasZip(validationProperties.getUrlOfSchemasZip()); - metadata.setSchemaRootPath(validationProperties.getSchemaRootPath()); - metadata.setSchematronRootPath(validationProperties.getSchematronRootPath()); - } - - private void setupXsltIdForPluginMetadata(Dataset dataset, - TransformationPluginMetadata pluginMetadata) { - DatasetXslt xsltObject; - if (pluginMetadata.isCustomXslt()) { - xsltObject = datasetXsltDao.getById(dataset.getXsltId().toString()); - } else { - xsltObject = datasetXsltDao.getLatestDefaultXslt(); - } - if (xsltObject != null && StringUtils.isNotEmpty(xsltObject.getXslt())) { - pluginMetadata.setXsltId(xsltObject.getId().toString()); - } - final TransformationParameters transformationParameters = new TransformationParameters(dataset); - pluginMetadata.setDatasetName(transformationParameters.getDatasetName()); - pluginMetadata.setCountry(transformationParameters.getEdmCountry()); - pluginMetadata.setLanguage(transformationParameters.getEdmLanguage()); - } - - private void setupDepublishPluginMetadata(Dataset dataset, DepublishPluginMetadata pluginMetadata) - throws BadContentException { - final Set recordIdsToDepublish = pluginMetadata.getRecordIdsToDepublish(); - if (!pluginMetadata.isDatasetDepublish()) { - final Set pendingDepublicationIds; - if (recordIdsToDepublish.isEmpty()) { - //Get all record ids that are marked as PENDING_DEPUBLICATION in the database - pendingDepublicationIds = depublishRecordIdDao - 
.getAllDepublishRecordIdsWithStatus(dataset.getDatasetId(), - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.PENDING_DEPUBLICATION); - } else { - //Match provided record ids that are marked as PENDING_DEPUBLICATION in the database - pendingDepublicationIds = depublishRecordIdDao - .getAllDepublishRecordIdsWithStatus(dataset.getDatasetId(), - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.PENDING_DEPUBLICATION, recordIdsToDepublish); - } - pluginMetadata.setRecordIdsToDepublish(pendingDepublicationIds); - } - } - - public ValidationProperties getValidationExternalProperties() { - synchronized (this) { - return validationExternalProperties; - } - } - - public void setValidationExternalProperties(ValidationProperties validationExternalProperties) { - synchronized (this) { - this.validationExternalProperties = validationExternalProperties; - } - } - - public ValidationProperties getValidationInternalProperties() { - synchronized (this) { - return validationInternalProperties; - } - } - - public void setValidationInternalProperties(ValidationProperties validationInternalProperties) { - synchronized (this) { - this.validationInternalProperties = validationInternalProperties; - } - } - - private int getDefaultSamplingSizeForLinkChecking() { - synchronized (this) { - return defaultSamplingSizeForLinkChecking; - } - } - - public void setDefaultSamplingSizeForLinkChecking(int defaultSamplingSizeForLinkChecking) { - synchronized (this) { - this.defaultSamplingSizeForLinkChecking = defaultSamplingSizeForLinkChecking; - } - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/DepublishRecordIdSortField.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/DepublishRecordIdSortField.java deleted file mode 100644 index 490b010ecf..0000000000 --- 
a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/DepublishRecordIdSortField.java +++ /dev/null @@ -1,39 +0,0 @@ -package eu.europeana.metis.core.util; - -import eu.europeana.metis.core.dataset.DepublishRecordId; - -/** - * Defines the sorting fields known for depublished records. - */ -public enum DepublishRecordIdSortField { - - /** - * Sorting based on record ID. - */ - RECORD_ID(DepublishRecordId.RECORD_ID_FIELD), - - /** - * Sorting based on the depublication state. - */ - DEPUBLICATION_STATE(DepublishRecordId.DEPUBLICATION_STATUS_FIELD), - - /** - * Sorting based on the depublication date. - */ - DEPUBLICATION_DATE(DepublishRecordId.DEPUBLICATION_DATE_FIELD); - - private final String databaseField; - - DepublishRecordIdSortField(String databaseField) { - this.databaseField = databaseField; - } - - /** - * Get the corresponding field name in the database. - * - * @return The field name. - */ - public String getDatabaseField() { - return databaseField; - } -} diff --git a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/SortDirection.java b/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/SortDirection.java deleted file mode 100644 index 4095b03c4c..0000000000 --- a/metis-core/metis-core-service/src/main/java/eu/europeana/metis/core/util/SortDirection.java +++ /dev/null @@ -1,36 +0,0 @@ -package eu.europeana.metis.core.util; - -import dev.morphia.query.Sort; -import java.util.function.Function; - -/** - * Defines the sorting directions. - */ -public enum SortDirection { - - /** - * Ascending sort. - */ - ASCENDING(Sort::ascending), - - /** - * Descending sort. - */ - DESCENDING(Sort::descending); - - private final Function sortCreator; - - SortDirection(Function sortCreator) { - this.sortCreator = sortCreator; - } - - /** - * Creates a MongoDB sort based on the given database field - * - * @param databaseField The database field. - * @return The MongoDB sort. 
- */ - public Sort createSort(String databaseField) { - return sortCreator.apply(databaseField); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDataEvolutionUtils.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDataEvolutionUtils.java deleted file mode 100644 index c11deb10f6..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDataEvolutionUtils.java +++ /dev/null @@ -1,549 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.Pagination; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import 
eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import 
eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-01 - */ -class TestDataEvolutionUtils { - - private static final String DATASET_ID = Integer.toString(TestObjectFactory.DATASETID); - private static DataEvolutionUtils dataEvolutionUtils; - private static WorkflowExecutionDao workflowExecutionDao; - - @BeforeAll - static void prepare() { - workflowExecutionDao = mock(WorkflowExecutionDao.class); - dataEvolutionUtils = spy(new DataEvolutionUtils(workflowExecutionDao)); - } - - @AfterEach - void cleanUp() { - reset(dataEvolutionUtils, workflowExecutionDao); - } - - @Test - void testComputePredecessorPlugin_HarvestPlugin() throws PluginExecutionNotAllowed { - assertNull( - dataEvolutionUtils - .computePredecessorPlugin(ExecutablePluginType.OAIPMH_HARVEST, null, DATASET_ID)); - assertNull( - dataEvolutionUtils - .computePredecessorPlugin(ExecutablePluginType.HTTP_HARVEST, null, DATASET_ID)); - assertNull(dataEvolutionUtils.computePredecessorPlugin(ExecutablePluginType.OAIPMH_HARVEST, - ExecutablePluginType.TRANSFORMATION, DATASET_ID)); - assertNull(dataEvolutionUtils.computePredecessorPlugin(ExecutablePluginType.HTTP_HARVEST, - ExecutablePluginType.TRANSFORMATION, DATASET_ID)); - Mockito.verify(workflowExecutionDao, Mockito.never()) - 
.getLatestSuccessfulExecutablePlugin(anyString(), any(), anyBoolean()); - } - - @Test - void testComputePredecessorPlugin() throws PluginExecutionNotAllowed { - - // Test the actual predecessor types without enforcing a predecessor type. - testComputePredecessorPlugin(new OaipmhHarvestPluginMetadata(), Collections.emptySet(), null); - testComputePredecessorPlugin(new HTTPHarvestPluginMetadata(), Collections.emptySet(), null); - testComputePredecessorPlugin(new ValidationExternalPluginMetadata(), - DataEvolutionUtils.getHarvestPluginGroup(), null); - testComputePredecessorPlugin(new TransformationPluginMetadata(), - EnumSet.of(ExecutablePluginType.VALIDATION_EXTERNAL), null); - testComputePredecessorPlugin(new ValidationInternalPluginMetadata(), - EnumSet.of(ExecutablePluginType.TRANSFORMATION), null); - testComputePredecessorPlugin(new NormalizationPluginMetadata(), - EnumSet.of(ExecutablePluginType.VALIDATION_INTERNAL), null); - testComputePredecessorPlugin(new EnrichmentPluginMetadata(), - EnumSet.of(ExecutablePluginType.NORMALIZATION), null); - testComputePredecessorPlugin(new MediaProcessPluginMetadata(), - EnumSet.of(ExecutablePluginType.ENRICHMENT), null); - testComputePredecessorPlugin(new IndexToPreviewPluginMetadata(), - EnumSet.of(ExecutablePluginType.MEDIA_PROCESS), null); - testComputePredecessorPlugin(new IndexToPublishPluginMetadata(), - EnumSet.of(ExecutablePluginType.PREVIEW), null); - testComputePredecessorPlugin(new DepublishPluginMetadata(), - EnumSet.of(ExecutablePluginType.PUBLISH), null); - testComputePredecessorPlugin(new LinkCheckingPluginMetadata(), - DataEvolutionUtils.getAllExceptLinkGroup(), null); - - // Test enforcing a predecessor type. 
- testComputePredecessorPlugin(new OaipmhHarvestPluginMetadata(), Collections.emptySet(), - ExecutablePluginType.ENRICHMENT); - testComputePredecessorPlugin(new HTTPHarvestPluginMetadata(), Collections.emptySet(), - ExecutablePluginType.ENRICHMENT); - testComputePredecessorPlugin(new TransformationPluginMetadata(), - EnumSet.of(ExecutablePluginType.OAIPMH_HARVEST), ExecutablePluginType.OAIPMH_HARVEST); - } - - private void testComputePredecessorPlugin(ExecutablePluginMetadata metadata, - Set predecessorTypes, ExecutablePluginType enforcedPluginType) - throws PluginExecutionNotAllowed { - // Create some objects. - final AbstractExecutablePlugin rootPlugin = mock(AbstractExecutablePlugin.class); - final String rootPluginId = "root plugin ID"; - when(rootPlugin.getId()).thenReturn(rootPluginId); - final WorkflowExecution rootExecution = new WorkflowExecution(); - final ObjectId rootExecutionId = new ObjectId(new Date(1)); - rootExecution.setId(rootExecutionId); - final WorkflowExecution predecessorExecution = new WorkflowExecution(); - final ObjectId predecessorExecutionId = new ObjectId(new Date(2)); - predecessorExecution.setId(predecessorExecutionId); - - // Mock the DAO for the objects just created. 
- int counter = 1; - AbstractExecutablePlugin recentPredecessorPlugin = null; - boolean needsValidPredecessor = - metadata.getExecutablePluginType() != ExecutablePluginType.DEPUBLISH; - for (ExecutablePluginType predecessorType : predecessorTypes) { - final AbstractExecutablePlugin predecessorPlugin = ExecutablePluginFactory - .createPlugin(metadata); - predecessorPlugin.setExecutionProgress(new ExecutionProgress()); - predecessorPlugin.getExecutionProgress().setProcessedRecords(1); - predecessorPlugin.getExecutionProgress().setErrors(0); - predecessorPlugin.setFinishedDate(new Date(counter)); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(DATASET_ID, - Collections.singleton(predecessorType), needsValidPredecessor)).thenReturn( - new PluginWithExecutionId<>(predecessorExecutionId.toString(), predecessorPlugin)); - recentPredecessorPlugin = predecessorPlugin; - counter++; - } - if (predecessorTypes.isEmpty() - || metadata.getExecutablePluginType() == ExecutablePluginType.DEPUBLISH) { - assertNull(dataEvolutionUtils - .computePredecessorPlugin(metadata.getExecutablePluginType(), enforcedPluginType, - DATASET_ID)); - } else { - assertNotNull(recentPredecessorPlugin); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(DATASET_ID, - DataEvolutionUtils.getHarvestPluginGroup(), true)) - .thenReturn(new PluginWithExecutionId<>(rootExecutionId.toString(), rootPlugin)); - when(workflowExecutionDao.getById(predecessorExecutionId.toString())) - .thenReturn(predecessorExecution); - final List> evolution = - Arrays.asList(ImmutablePair.of(rootPlugin, rootExecution), - ImmutablePair.of(mock(AbstractExecutablePlugin.class), rootExecution), - ImmutablePair.of(mock(AbstractExecutablePlugin.class), rootExecution)); - when(dataEvolutionUtils.compileVersionEvolution(recentPredecessorPlugin, predecessorExecution)) - .thenReturn(evolution); - - // Test without errors - final PluginWithExecutionId withoutErrorsResult = dataEvolutionUtils - 
.computePredecessorPlugin(metadata.getExecutablePluginType(), enforcedPluginType, - DATASET_ID); - assertSame(recentPredecessorPlugin, withoutErrorsResult.getPlugin()); - assertEquals(predecessorExecution.getId().toString(), withoutErrorsResult.getExecutionId()); - - // Test when root plugin doesn't match - final AbstractExecutablePlugin otherRootPlugin = mock(AbstractExecutablePlugin.class); - final String otherRootPluginId = "other root plugin ID"; - when(otherRootPlugin.getId()).thenReturn(otherRootPluginId); - when(dataEvolutionUtils.compileVersionEvolution(recentPredecessorPlugin, predecessorExecution)) - .thenReturn( - Collections.singletonList(ImmutablePair.of(otherRootPlugin, rootExecution))); - assertThrows(PluginExecutionNotAllowed.class, - () -> dataEvolutionUtils.computePredecessorPlugin(metadata.getExecutablePluginType(), - enforcedPluginType, DATASET_ID)); - when(dataEvolutionUtils.compileVersionEvolution(recentPredecessorPlugin, predecessorExecution)) - .thenReturn(Collections.singletonList(ImmutablePair.of(rootPlugin, rootExecution))); - assertSame(recentPredecessorPlugin, dataEvolutionUtils.computePredecessorPlugin( - metadata.getExecutablePluginType(), enforcedPluginType, DATASET_ID).getPlugin()); - - // Test with errors - recentPredecessorPlugin.getExecutionProgress().setErrors(1); - assertThrows(PluginExecutionNotAllowed.class, () -> dataEvolutionUtils.computePredecessorPlugin( - metadata.getExecutablePluginType(), enforcedPluginType, DATASET_ID)); - - // Test without progress information - recentPredecessorPlugin.setExecutionProgress(null); - assertThrows(PluginExecutionNotAllowed.class, () -> dataEvolutionUtils.computePredecessorPlugin( - metadata.getExecutablePluginType(), enforcedPluginType, DATASET_ID)); - } - } - - @Test - void testComputePredecessorPluginForWorkflowExecution() { - - // Add non executable plugin. 
- final List plugins = new ArrayList<>(); - plugins.add(new ReindexToPreviewPlugin(new ReindexToPreviewPluginMetadata())); - - // Add finished plugin of the wrong type. - final AbstractMetisPlugin pluginOfWrongType = - ExecutablePluginFactory.createPlugin(new TransformationPluginMetadata()); - pluginOfWrongType.setPluginStatus(PluginStatus.FINISHED); - plugins.add(pluginOfWrongType); - - // Add two finished plugins of the right type. - final AbstractMetisPlugin firstCandidate = - ExecutablePluginFactory.createPlugin(new EnrichmentPluginMetadata()); - firstCandidate.setPluginStatus(PluginStatus.FINISHED); - plugins.add(firstCandidate); - final AbstractMetisPlugin lastCandidate = - ExecutablePluginFactory.createPlugin(new EnrichmentPluginMetadata()); - lastCandidate.setPluginStatus(PluginStatus.FINISHED); - plugins.add(lastCandidate); - - // Add non-finished plugin of the right type. - final AbstractMetisPlugin pluginOfWrongStatus = - ExecutablePluginFactory.createPlugin(new EnrichmentPluginMetadata()); - pluginOfWrongStatus.setPluginStatus(PluginStatus.CANCELLED); - plugins.add(pluginOfWrongStatus); - - // Add all this to a workflow execution. - final WorkflowExecution workflowExecution = new WorkflowExecution(); - workflowExecution.setMetisPlugins(plugins); - - // Execute the call expecting a successful result. 
- assertSame(lastCandidate, - DataEvolutionUtils - .computePredecessorPlugin(ExecutablePluginType.MEDIA_PROCESS, workflowExecution)); - - // Execute the call for plugin type not requiring predecessor - assertNull( - DataEvolutionUtils - .computePredecessorPlugin(ExecutablePluginType.HTTP_HARVEST, workflowExecution)); - - // Execute the call for failed result - assertThrows(IllegalArgumentException.class, - () -> DataEvolutionUtils - .computePredecessorPlugin(ExecutablePluginType.PUBLISH, workflowExecution)); - } - - @Test - void testGetRecordEvolutionForVersionHappyFlow() { - - // Create two workflow executions with three plugins and link them together - final String datasetId = "dataset ID"; - final AbstractExecutablePlugin plugin1 = mock(AbstractExecutablePlugin.class); - final AbstractExecutablePlugin plugin2 = mock(AbstractExecutablePlugin.class); - final AbstractExecutablePlugin plugin3 = mock(AbstractExecutablePlugin.class); - final WorkflowExecution execution1 = createWorkflowExecution(datasetId, plugin1); - final WorkflowExecution execution2 = createWorkflowExecution(datasetId, plugin2, plugin3); - doReturn(null).when(dataEvolutionUtils).getPreviousExecutionAndPlugin(plugin1, datasetId); - doReturn(new ImmutablePair<>(plugin1, execution1)).when(dataEvolutionUtils) - .getPreviousExecutionAndPlugin(plugin2, datasetId); - doReturn(new ImmutablePair<>(plugin2, execution2)).when(dataEvolutionUtils) - .getPreviousExecutionAndPlugin(plugin3, datasetId); - - // Execute the call to examine all three - final List> resultForThree = dataEvolutionUtils - .compileVersionEvolution(plugin3, execution2); - assertNotNull(resultForThree); - assertEquals(2, resultForThree.size()); - assertSame(plugin1, resultForThree.get(0).getLeft()); - assertSame(execution1, resultForThree.get(0).getRight()); - assertSame(plugin2, resultForThree.get(1).getLeft()); - assertSame(execution2, resultForThree.get(1).getRight()); - - // Execute the call to examine just two - final List> 
resultForTwo = dataEvolutionUtils - .compileVersionEvolution(plugin2, execution2); - assertNotNull(resultForTwo); - assertEquals(1, resultForTwo.size()); - assertSame(plugin1, resultForThree.get(0).getLeft()); - assertSame(execution1, resultForThree.get(0).getRight()); - - // Execute the call to examine just one - final List> resultForOne = dataEvolutionUtils - .compileVersionEvolution(plugin1, execution1); - assertNotNull(resultForOne); - assertTrue(resultForOne.isEmpty()); - } - - private static WorkflowExecution createWorkflowExecution(String datasetId, - AbstractMetisPlugin... plugins) { - final WorkflowExecution result = new WorkflowExecution(); - result.setId(new ObjectId()); - result.setDatasetId(datasetId); - result.setMetisPlugins(Arrays.asList(plugins)); - return result; - } - - @Test - void testGetPreviousExecutionAndPlugin() { - - // Create some entities that we will be using. - final String datasetId = "dataset id"; - final PluginType pluginType = PluginType.MEDIA_PROCESS; - final PluginType previousPluginType = PluginType.OAIPMH_HARVEST; - final Date previousPluginTime = new Date(); - final WorkflowExecution previousExecution = spy(new WorkflowExecution()); - final AbstractMetisPlugin previousPlugin = createMetisPlugin(previousPluginType, null, null); - final AbstractMetisPlugin plugin = createMetisPlugin(pluginType, previousPluginType, - previousPluginTime); - - // Test the absence of one or both of the pointers to a previous execution. - assertNull(dataEvolutionUtils.getPreviousExecutionAndPlugin(createMetisPlugin( - pluginType, null, null), datasetId)); - assertNull(dataEvolutionUtils.getPreviousExecutionAndPlugin(createMetisPlugin( - pluginType, previousPluginType, null), datasetId)); - assertNull(dataEvolutionUtils.getPreviousExecutionAndPlugin(createMetisPlugin( - pluginType, null, previousPluginTime), datasetId)); - - // Test the absence of the execution despite the presence of the pointers. 
- when(workflowExecutionDao - .getByTaskExecution(eq(new ExecutedMetisPluginId(previousPluginTime, previousPluginType)), eq(datasetId))) - .thenReturn(null); - assertNull(dataEvolutionUtils.getPreviousExecutionAndPlugin(plugin, datasetId)); - when(workflowExecutionDao - .getByTaskExecution(eq(new ExecutedMetisPluginId(previousPluginTime, previousPluginType)), eq(datasetId))) - .thenReturn(previousExecution); - - // Test the absence of the plugin despite the presence of the pointers. - when(previousExecution.getMetisPluginWithType(eq(previousPluginType))).thenReturn( - Optional.empty()); - assertNull(dataEvolutionUtils.getPreviousExecutionAndPlugin(plugin, datasetId)); - when(previousExecution.getMetisPluginWithType(eq(previousPluginType))) - .thenReturn(Optional.of(previousPlugin)); - - // Test the happy flow - final Pair result = dataEvolutionUtils - .getPreviousExecutionAndPlugin(plugin, datasetId); - assertNotNull(result); - assertSame(previousExecution, result.getRight()); - assertSame(previousPlugin, result.getLeft()); - } - - private static AbstractMetisPlugin createMetisPlugin(PluginType type, PluginType previousType, - Date previousDate) { - AbstractMetisPluginMetadata metadata = mock(AbstractMetisPluginMetadata.class); - when(metadata.getPluginType()).thenReturn(type); - when(metadata.getRevisionNamePreviousPlugin()) - .thenReturn(previousType == null ? 
null : previousType.name()); - when(metadata.getRevisionTimestampPreviousPlugin()).thenReturn(previousDate); - AbstractMetisPlugin result = mock(AbstractMetisPlugin.class); - when(result.getPluginType()).thenReturn(type); - when(result.getPluginMetadata()).thenReturn(metadata); - return result; - } - - @Test - void testGetPublishedHarvestIncrements() { - - // Create a bunch of harvest and index plugins and link them - final var fullOaiHarvest1 = createOaiHarvestPlugin(new Date(10), false, "A"); - final var incrementalOaiHarvest2 = createOaiHarvestPlugin(new Date(20), true, "B"); - final var httpHarvest3 = createExecutableMetisPlugin(ExecutablePluginType.HTTP_HARVEST, - new Date(30), HTTPHarvestPlugin.class, HTTPHarvestPluginMetadata.class, "C"); - final var fullOaiHarvest4 = createOaiHarvestPlugin(new Date(40), false, "D"); - final var incrementalOaiHarvest5 = createOaiHarvestPlugin(new Date(50), true, "E"); - final var indexPlugin1 = createIndexToPublish(new Date(11), "F"); - final var indexPlugin2a = createIndexToPublish(new Date(21), "G"); - final var indexPlugin2b = createIndexToPublish(new Date(22), "H"); - final var indexPlugin3a = createIndexToPublish(new Date(31), "I"); - final var indexPlugin3b = createIndexToPublish(new Date(32), "J"); - final var indexPlugin4a = createIndexToPublish(new Date(41), "K"); - final var indexPlugin4b = createIndexToPublish(new Date(42), "L"); - final var indexPlugin5a = createIndexToPublish(new Date(51), "M"); - final var indexPlugin5b = createIndexToPublish(new Date(52), "N"); - doReturn(fullOaiHarvest1).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin1)); - doReturn(incrementalOaiHarvest2).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin2a)); - doReturn(incrementalOaiHarvest2).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin2b)); - doReturn(httpHarvest3).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin3a)); - 
doReturn(httpHarvest3).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin3b)); - doReturn(fullOaiHarvest4).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin4a)); - doReturn(fullOaiHarvest4).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin4b)); - doReturn(incrementalOaiHarvest5).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin5a)); - doReturn(incrementalOaiHarvest5).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin5b)); - - // Test happy flow with two OAI harvests. Only last full and last incremented to be returned. - final var listOfAllOaiIndex = List.of(indexPlugin5b, indexPlugin5a, indexPlugin4b, - indexPlugin4a, indexPlugin2b, indexPlugin2a, indexPlugin1); - doReturn(listOfAllOaiIndex).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - final var result1 = dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID); - assertListSameItems(List.of(fullOaiHarvest4, incrementalOaiHarvest5), result1); - - // Test happy flow with an http harvest - final var listOfHttpAndOaiIndex = List.of(indexPlugin5b, indexPlugin5a, indexPlugin3b, - indexPlugin3a, indexPlugin2b, indexPlugin2a, indexPlugin1); - doReturn(listOfHttpAndOaiIndex).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - final var result2 = dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID); - assertListSameItems(List.of(httpHarvest3, incrementalOaiHarvest5), result2); - - // Test happy flow with just one full harvest - doReturn(List.of(indexPlugin1)).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - final var result3 = dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID); - assertListSameItems(List.of(fullOaiHarvest1), result3); - - // Test flow with no harvest - doReturn(Collections.emptyList()).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - assertTrue(dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID).isEmpty()); - - // Test 
flow with only an incremental harvest - doReturn(List.of(indexPlugin5b)).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - assertTrue(dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID).isEmpty()); - - // Test flow with invalid harvests or non-harvests - doReturn(List.of(indexPlugin5a, indexPlugin4a, indexPlugin1)).when(dataEvolutionUtils) - .getPublishOperationsSortedInversely(DATASET_ID); - doReturn(DataStatus.DELETED).when(indexPlugin4a.getPlugin()).getDataStatus(); - assertTrue(dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID).isEmpty()); - doReturn(DataStatus.DEPRECATED).when(indexPlugin4a.getPlugin()).getDataStatus(); - assertListSameItems(List.of(fullOaiHarvest4, incrementalOaiHarvest5), - dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID)); - doReturn(DataStatus.VALID).when(indexPlugin4a.getPlugin()).getDataStatus(); - assertListSameItems(List.of(fullOaiHarvest4, incrementalOaiHarvest5), - dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID)); - doReturn(indexPlugin4a).when(dataEvolutionUtils).getRootAncestor(same(indexPlugin4a)); - assertTrue(dataEvolutionUtils.getPublishedHarvestIncrements(DATASET_ID).isEmpty()); - } - - private void assertListSameItems( - List> expected, - List> actual) { - assertListSameItems(expected, actual, item -> item); - } - - private void assertListSameItems(List expected, List actual, - Function extractor) { - assertNotNull(expected); - assertEquals(expected.size(), actual.size()); - for (int i = 0; i < expected.size(); i++) { - assertSame(expected.get(i), extractor.apply(actual.get(i))); - } - } - - private static > - PluginWithExecutionId createExecutableMetisPlugin(ExecutablePluginType type, Date startedDate, - Class pluginClass, Class metadataClass, String executionId) { - M metadata = mock(metadataClass); - doReturn(type).when(metadata).getExecutablePluginType(); - T result = mock(pluginClass); - when(result.getPluginType()).thenReturn(type.toPluginType()); 
- when(result.getPluginMetadata()).thenReturn(metadata); - when(result.getStartedDate()).thenReturn(startedDate); - return new PluginWithExecutionId<>(executionId, result); - } - - private static PluginWithExecutionId createIndexToPublish(Date startedDate, - String executionId) { - return createExecutableMetisPlugin(ExecutablePluginType.PUBLISH, startedDate, - IndexToPublishPlugin.class, IndexToPublishPluginMetadata.class, executionId); - } - - private static PluginWithExecutionId createOaiHarvestPlugin(Date startedDate, - boolean incremental, String executionId) { - final PluginWithExecutionId result = createExecutableMetisPlugin( - ExecutablePluginType.OAIPMH_HARVEST, startedDate, OaipmhHarvestPlugin.class, - OaipmhHarvestPluginMetadata.class, executionId); - when(result.getPlugin().getPluginMetadata().isIncrementalHarvest()).thenReturn(incremental); - return result; - } - - @Test - void testGetPublishOperationsSortedInversely(){ - - // Create some objects - final var otherPluginA = createOaiHarvestPlugin(new Date(0), false, null).getPlugin(); - final var indexPluginA = createIndexToPublish(new Date(1), null).getPlugin(); - final var indexPluginB1 = createIndexToPublish(new Date(2), null).getPlugin(); - final var indexPluginB2 = createIndexToPublish(new Date(3), null).getPlugin(); - final var executionA = createWorkflowExecution(DATASET_ID, otherPluginA, indexPluginA); - final var executionB = createWorkflowExecution(DATASET_ID, indexPluginB1, indexPluginB2); - final var pagination = mock(Pagination.class); - - // Test happy flow - final var input = new ResultList<>(List.of(new ExecutionDatasetPair(new Dataset(), executionA), - new ExecutionDatasetPair(new Dataset(), executionB)), true); - doReturn(pagination).when(workflowExecutionDao).createPagination(0, null, true); - doReturn(input).when(workflowExecutionDao) - .getWorkflowExecutionsOverview(eq(Set.of(DATASET_ID)), eq(Set.of(PluginStatus.FINISHED)), - eq(Set.of(PluginType.PUBLISH)), isNull(), isNull(), 
same(pagination)); - final List> result1 = dataEvolutionUtils - .getPublishOperationsSortedInversely(DATASET_ID); - assertListSameItems(List.of(indexPluginB2, indexPluginB1, indexPluginA), result1, - PluginWithExecutionId::getPlugin); - - // Test happy flow with different order - doReturn(new Date(13)).when(indexPluginA).getStartedDate(); - doReturn(new Date(12)).when(indexPluginB1).getStartedDate(); - doReturn(new Date(11)).when(indexPluginB2).getStartedDate(); - final List> result2 = dataEvolutionUtils - .getPublishOperationsSortedInversely(DATASET_ID); - assertListSameItems(List.of(indexPluginA, indexPluginB1, indexPluginB2), result2, - PluginWithExecutionId::getPlugin); - - // Test for no results - doReturn(new ResultList<>(Collections.emptyList(), true)).when(workflowExecutionDao) - .getWorkflowExecutionsOverview(eq(Set.of(DATASET_ID)), eq(Set.of(PluginStatus.FINISHED)), - eq(Set.of(PluginType.PUBLISH)), isNull(), isNull(), same(pagination)); - final List> result3 = dataEvolutionUtils - .getPublishOperationsSortedInversely(DATASET_ID); - assertListSameItems(Collections.emptyList(), result3, PluginWithExecutionId::getPlugin); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetDao.java deleted file mode 100644 index 04ad58dc47..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetDao.java +++ /dev/null @@ -1,461 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; - -import 
com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; -import dev.morphia.DeleteOptions; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.service.mcs.exception.DataSetAlreadyExistsException; -import eu.europeana.cloud.service.mcs.exception.MCSException; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetIdSequence; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class TestDatasetDao { - - private static DatasetDao datasetDao; - private static Dataset dataset; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static MorphiaDatastoreProviderImpl provider; - private static DataSetServiceClient ecloudDataSetServiceClient; - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - MongoClient mongoClient = MongoClients - .create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = new MorphiaDatastoreProviderImpl(mongoClient, "test"); - ecloudDataSetServiceClient = Mockito.mock(DataSetServiceClient.class); - - datasetDao = new DatasetDao(provider, ecloudDataSetServiceClient); - datasetDao.setDatasetsPerRequest(1); - datasetDao.setEcloudProvider("ecloudProvider"); - - dataset = 
TestObjectFactory.createDataset("testName"); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - @AfterEach - void cleanUp() { - Datastore datastore = provider.getDatastore(); - datastore.find(Dataset.class).delete(new DeleteOptions().multi(true)); - datastore.find(DatasetIdSequence.class).delete(); - Mockito.reset(ecloudDataSetServiceClient); - } - - @Test - void testCreateRetrieveDataset() { - datasetDao.create(dataset); - Dataset storedDataset = datasetDao.getDatasetByDatasetId(dataset.getDatasetId()); - assertEquals(dataset.getDatasetName(), storedDataset.getDatasetName()); - assertEquals(dataset.getCountry(), storedDataset.getCountry()); - assertEquals(dataset.getCreatedDate(), storedDataset.getCreatedDate()); - assertEquals(dataset.getDataProvider(), storedDataset.getDataProvider()); - assertEquals(dataset.getDescription(), storedDataset.getDescription()); - assertEquals(dataset.getLanguage(), storedDataset.getLanguage()); - assertEquals(dataset.getPublicationFitness(), storedDataset.getPublicationFitness()); - assertEquals(dataset.getNotes(), storedDataset.getNotes()); - assertEquals(dataset.getReplacedBy(), storedDataset.getReplacedBy()); - assertEquals(dataset.getUpdatedDate(), storedDataset.getUpdatedDate()); - } - - - @Test - void testUpdateRetrieveDataset() { - datasetDao.create(dataset); - datasetDao.update(dataset); - Dataset storedDataset = datasetDao.getDatasetByDatasetId(dataset.getDatasetId()); - assertEquals(dataset.getDatasetName(), storedDataset.getDatasetName()); - assertEquals(dataset.getCountry(), storedDataset.getCountry()); - assertEquals(dataset.getCreatedDate(), storedDataset.getCreatedDate()); - assertEquals(dataset.getDataProvider(), storedDataset.getDataProvider()); - assertEquals(dataset.getDescription(), storedDataset.getDescription()); - assertEquals(dataset.getLanguage(), storedDataset.getLanguage()); - assertEquals(dataset.getPublicationFitness(), storedDataset.getPublicationFitness()); - 
assertEquals(dataset.getNotes(), storedDataset.getNotes()); - assertEquals(dataset.getReplacedBy(), storedDataset.getReplacedBy()); - assertEquals(dataset.getUpdatedDate(), storedDataset.getUpdatedDate()); - } - - @Test - void testDeleteDataset() { - datasetDao.create(dataset); - Dataset storedDataset = datasetDao.getDatasetByDatasetId(dataset.getDatasetId()); - datasetDao.delete(storedDataset); - storedDataset = datasetDao.getDatasetByDatasetId(dataset.getDatasetId()); - assertNull(storedDataset); - } - - @Test - void testDelete() { - String key = datasetDao.create(dataset).getId().toString(); - Dataset storedDataset = datasetDao.getById(key); - assertTrue(datasetDao.delete(storedDataset)); - assertNull(datasetDao.getById(key)); - } - - @Test - void testDeleteByDatasetId() { - String key = datasetDao.create(dataset).getId().toString(); - Dataset storedDataset = datasetDao.getById(key); - assertTrue(datasetDao.deleteByDatasetId(storedDataset.getDatasetId())); - assertNull(datasetDao.getById(key)); - } - - @Test - void testGetByDatasetName() { - Dataset createdDataset = datasetDao.create(dataset); - Dataset storedDataset = datasetDao.getDatasetByDatasetName(createdDataset.getDatasetName()); - assertEquals(createdDataset.getDatasetId(), storedDataset.getDatasetId()); - } - - @Test - void testGetByDatasetId() { - Dataset createdDataset = datasetDao.create(dataset); - Dataset storedDataset = datasetDao.getDatasetByDatasetId(createdDataset.getDatasetId()); - assertEquals(createdDataset.getDatasetName(), storedDataset.getDatasetName()); - } - - @Test - void getDatasetByOrganizationIdAndDatasetName() { - Dataset createdDataset = datasetDao.create(dataset); - Dataset storedDataset = datasetDao - .getDatasetByOrganizationIdAndDatasetName(createdDataset.getOrganizationId(), - createdDataset.getDatasetName()); - assertEquals(createdDataset.getDatasetId(), storedDataset.getDatasetId()); - } - - @Test - void testExistsDatasetByDatasetName() { - Dataset createdDataset = 
datasetDao.create(dataset); - assertTrue(datasetDao.existsDatasetByDatasetName(createdDataset.getDatasetName())); - datasetDao.deleteByDatasetId(createdDataset.getDatasetId()); - assertFalse(datasetDao.existsDatasetByDatasetName(createdDataset.getDatasetName())); - } - - @Test - void testGetAllDatasetByProvider() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setProvider("myProvider"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - Dataset ds2 = TestObjectFactory.createDataset("dataset2"); - //add some required fields (indexed) - ds2.setProvider("myProvider"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("dataset3"); - //add some required fields (indexed) - ds3.setProvider("otherProvider"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - int nextPage = 0; - int allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.getAllDatasetsByProvider("myProvider", nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(2, allDatasetsCount); - } - - @Test - void testGetAllDatasetByIntermediateProvider() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setIntermediateProvider("myProvider"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - Dataset ds2 = 
TestObjectFactory.createDataset("dataset2"); - //add some required fields (indexed) - ds2.setIntermediateProvider("myProvider"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("dataset3"); - //add some required fields (indexed) - ds3.setIntermediateProvider("otherProvider"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - int nextPage = 0; - int allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.getAllDatasetsByIntermediateProvider("myProvider", nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(2, allDatasetsCount); - } - - @Test - void testGetAllDatasetByDataProvider() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setDataProvider("myProvider"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - Dataset ds2 = TestObjectFactory.createDataset("dataset2"); - //add some required fields (indexed) - ds2.setDataProvider("myProvider"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("dataset3"); - //add some required fields (indexed) - ds3.setDataProvider("otherProvider"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - int nextPage = 0; - int allDatestsCount = 0; - do { - ResponseListWrapper 
datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.getAllDatasetsByDataProvider("myProvider", nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatestsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(2, allDatestsCount); - } - - @Test - void testGetAllDatasetByOrganizationId() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setOrganizationId("organizationId1"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - Dataset ds2 = TestObjectFactory.createDataset("dataset2"); - //add some required fields (indexed) - ds2.setOrganizationId("organizationId1"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("dataset3"); - //add some required fields (indexed) - ds3.setOrganizationId("organizationId2"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - // Check with pagination - int nextPage = 0; - int allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.getAllDatasetsByOrganizationId("organizationId1", nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - assertEquals(2, allDatasetsCount); - - // Check without pagination - final List resultWithoutPagination = - datasetDao.getAllDatasetsByOrganizationId("organizationId1"); - assertEquals(2, 
resultWithoutPagination.size()); - } - - @Test - void testGetAllDatasetByOrganizationName() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setOrganizationName("organizationName1"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - Dataset ds2 = TestObjectFactory.createDataset("dataset2"); - //add some required fields (indexed) - ds2.setOrganizationName("organizationName1"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("dataset3"); - //add some required fields (indexed) - ds3.setOrganizationName("organizationName2"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - int nextPage = 0; - int allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.getAllDatasetsByOrganizationName("organizationName1", nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(2, allDatasetsCount); - } - - @Test - void testFindNextInSequenceDatasetId() { - DatasetIdSequence datasetIdSequence = new DatasetIdSequence(0); - provider.getDatastore().save(datasetIdSequence); - - int nextInSequenceDatasetId = datasetDao.findNextInSequenceDatasetId(); - assertEquals(1, nextInSequenceDatasetId); - - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setDatasetId("2"); - datasetDao.create(dataset); - - nextInSequenceDatasetId = datasetDao.findNextInSequenceDatasetId(); - assertEquals(3, nextInSequenceDatasetId); - } - - 
@Test - void testCheckAndCreateDatasetInEcloud() throws Exception { - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - datasetDao.create(dataset); - when(ecloudDataSetServiceClient.createDataSet(any(), any(), any())).thenReturn(null); - - datasetDao.checkAndCreateDatasetInEcloud(dataset); - } - - @Test - void testCheckAndCreateDatasetInEcloud_FieldWithEcloudIdIsAlreadyPresent() throws Exception { - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - dataset.setEcloudDatasetId("f525f64c-fea0-44bf-8c56-88f30962734c"); - datasetDao.create(dataset); - when(ecloudDataSetServiceClient.createDataSet(any(), any(), any())).thenReturn(null); - - datasetDao.checkAndCreateDatasetInEcloud(dataset); - } - - @Test - void testCheckAndCreateDatasetInEcloud_DataSetAlreadyExistsException() throws Exception { - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - datasetDao.create(dataset); - when(ecloudDataSetServiceClient.createDataSet(any(), any(), any())) - .thenThrow(new DataSetAlreadyExistsException("Dataset already exist, not recreating")); - - assertThrows(ExternalTaskException.class, - () -> datasetDao.checkAndCreateDatasetInEcloud(dataset)); - } - - @Test - void testCheckAndCreateDatasetInEcloud_MCSException() throws Exception { - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - datasetDao.create(dataset); - when(ecloudDataSetServiceClient.createDataSet(any(), any(), any())) - .thenThrow(new MCSException("An error has occurred during ecloud dataset creation.")); - - assertThrows(ExternalTaskException.class, - () -> datasetDao.checkAndCreateDatasetInEcloud(dataset)); - } - - @Test - void testSearchDatasetsBasedOnSearchString() { - Dataset ds1 = TestObjectFactory.createDataset("dataset1"); - //add some required fields (indexed) - ds1.setOrganizationName("organizationName1"); - ds1.setEcloudDatasetId("id1"); - ds1.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 1)); - datasetDao.create(ds1); - - 
Dataset ds2 = TestObjectFactory.createDataset("test_dataset_2"); - //add some required fields (indexed) - ds2.setOrganizationName("organizationName1"); - ds2.setEcloudDatasetId("id2"); - ds2.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 2)); - datasetDao.create(ds2); - - Dataset ds3 = TestObjectFactory.createDataset("test_3"); - //add some required fields (indexed) - ds3.setOrganizationName("organizationName2"); - ds3.setEcloudDatasetId("id3"); - ds3.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + 3)); - datasetDao.create(ds3); - - int nextPage = 0; - int allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.searchDatasetsBasedOnSearchString(Collections.singletonList("0"), - Collections.singletonList("test"), nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(2, allDatasetsCount); - - nextPage = 0; - allDatasetsCount = 0; - do { - ResponseListWrapper datasetResponseListWrapper = new ResponseListWrapper<>(); - datasetResponseListWrapper.setResultsAndLastPage( - datasetDao.searchDatasetsBasedOnSearchString(Arrays.asList("0", "1"), - Collections.singletonList("test"), nextPage), datasetDao - .getDatasetsPerRequest(), nextPage); - allDatasetsCount += datasetResponseListWrapper.getListSize(); - nextPage = datasetResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(3, allDatasetsCount); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetXsltDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetXsltDao.java deleted file mode 100644 index 8e0437925a..0000000000 --- 
a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDatasetXsltDao.java +++ /dev/null @@ -1,115 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; -import dev.morphia.DeleteOptions; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.util.Date; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-03-02 - */ -class TestDatasetXsltDao { - - private static DatasetXsltDao datasetXsltDao; - private static DatasetXslt datasetXslt; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static MorphiaDatastoreProviderImpl provider; - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - MongoClient mongoClient = MongoClients - .create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = new MorphiaDatastoreProviderImpl(mongoClient, "test"); - - datasetXsltDao = new DatasetXsltDao(provider); - - Dataset dataset = TestObjectFactory.createDataset("testName"); - datasetXslt = TestObjectFactory.createXslt(dataset); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - 
@AfterEach - void cleanUp() { - Datastore datastore = provider.getDatastore(); - datastore.find(DatasetXslt.class).delete(new DeleteOptions().multi(true)); - } - - @Test - void testCreateRetrieveXslt() { - String xsltId = datasetXsltDao.create(datasetXslt).getId().toString(); - DatasetXslt storedDatasetXslt = datasetXsltDao.getById(xsltId); - assertEquals(datasetXslt.getDatasetId(), storedDatasetXslt.getDatasetId()); - assertEquals(datasetXslt.getXslt(), storedDatasetXslt.getXslt()); - } - - @Test - void testUpdateRetrieveXslt() { - datasetXsltDao.create(datasetXslt); - String xsltId = datasetXsltDao.update(datasetXslt); - - DatasetXslt storedDatasetXslt = datasetXsltDao.getById(xsltId); - assertEquals(datasetXslt.getDatasetId(), storedDatasetXslt.getDatasetId()); - assertEquals(datasetXslt.getXslt(), storedDatasetXslt.getXslt()); - } - - @Test - void testDeleteXslt() { - String xsltId = datasetXsltDao.create(datasetXslt).getId().toString(); - DatasetXslt storedDatasetXslt = datasetXsltDao.getById(xsltId); - datasetXsltDao.delete(storedDatasetXslt); - storedDatasetXslt = datasetXsltDao.getById(xsltId); - assertNull(storedDatasetXslt); - } - - @Test - void testDeleteAllByDatasetId() { - String xsltId1 = datasetXsltDao.create(datasetXslt).getId().toString(); - String xsltId2 = datasetXsltDao.create(datasetXslt).getId().toString(); - String xsltId3 = datasetXsltDao.create(datasetXslt).getId().toString(); - assertTrue(datasetXsltDao.deleteAllByDatasetId(datasetXslt.getDatasetId())); - assertNull(datasetXsltDao.getById(xsltId1)); - assertNull(datasetXsltDao.getById(xsltId2)); - assertNull(datasetXsltDao.getById(xsltId3)); - } - - @Test - void getLatestXsltForDatasetId() { - final Dataset dataset = TestObjectFactory.createDataset("testName"); - final DatasetXslt datasetXslt1 = TestObjectFactory.createXslt(dataset); - datasetXslt1.setCreatedDate(new Date(1000)); - final DatasetXslt datasetXslt2 = TestObjectFactory.createXslt(dataset); - datasetXslt2.setCreatedDate(new 
Date(2000)); - final DatasetXslt datasetXslt3 = TestObjectFactory.createXslt(dataset); - datasetXslt3.setCreatedDate(new Date(3000)); - - datasetXsltDao.create(datasetXslt1); - datasetXsltDao.create(datasetXslt2); - String xsltId3 = datasetXsltDao.create(datasetXslt3).getId().toString(); - DatasetXslt latestDatasetXsltForDatasetId = datasetXsltDao - .getLatestXsltForDatasetId(datasetXslt3.getDatasetId()); - assertEquals(xsltId3, latestDatasetXsltForDatasetId.getId().toString()); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDepublishRecordIdDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDepublishRecordIdDao.java deleted file mode 100644 index 539d2a9eb5..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestDepublishRecordIdDao.java +++ /dev/null @@ -1,331 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verifyNoInteractions; - -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.DeleteOptions; -import eu.europeana.metis.core.dataset.DepublishRecordId; -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProvider; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.rest.DepublishRecordIdView; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.exception.BadContentException; -import 
eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.time.Instant; -import java.util.Date; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class TestDepublishRecordIdDao { - - private static MorphiaDatastoreProvider provider; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static final long MAX_DEPUBLISH_RECORD_IDS_PER_DATASET = 5L; - private static DepublishRecordIdDao depublishRecordIdDao; - private static MongoClient mongoClient; - - @BeforeEach - void cleanUp() { - - new MorphiaDatastoreProviderImpl(mongoClient, "test").getDatastore() - .find(DepublishRecordId.class).delete(new DeleteOptions().multi(true)); - reset(provider, depublishRecordIdDao); - } - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - mongoClient = MongoClients.create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = spy(new MorphiaDatastoreProviderImpl(mongoClient, "test")); - depublishRecordIdDao = spy( - new DepublishRecordIdDao(provider, MAX_DEPUBLISH_RECORD_IDS_PER_DATASET)); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - @Test - void createRecordIdsToBeDepublishedHappyScenarioTest() throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1001"); - - depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, setTest); - - assertEquals(1, provider.getDatastore().find(DepublishRecordId.class).count()); - assertEquals("1001", - 
provider.getDatastore().find(DepublishRecordId.class).first().getRecordId()); - } - - @Test - void createRecordIdsToBeDepublishedBigNumberOfCandidateRecordIdsTest() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1008", "1009", "1010", "1011", "1012", "1013"); - - Throwable exception = assertThrows(BadContentException.class, - () -> depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, setTest)); - - assertEquals( - "Can't add these records: this would violate the maximum number of records per dataset.", - exception.getMessage()); - } - - @Test - void createRecordIdsToBeDepublishedBigNumberOfDepublishedRecord() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1014"); - - doReturn(6L).when(depublishRecordIdDao).countDepublishRecordIdsForDataset(datasetId); - - Throwable exception = assertThrows(BadContentException.class, - () -> depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, setTest)); - - assertEquals( - "Can't add these records: this would violate the maximum number of records per dataset.", - exception.getMessage()); - } - - @Test - void deletePendingRecordIdsTest() throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1"); - final Set biggerThanAllowedSet = IntStream - .range(0, (int) MAX_DEPUBLISH_RECORD_IDS_PER_DATASET + 1).mapToObj(Integer::toString) - .collect(Collectors.toSet()); - - //Big list should fail - assertThrows(BadContentException.class, - () -> depublishRecordIdDao.deletePendingRecordIds(datasetId, biggerThanAllowedSet)); - - depublishRecordIdDao - .addRecords(setTest, datasetId, DepublicationStatus.PENDING_DEPUBLICATION, Instant.now()); - assertEquals(1, provider.getDatastore().find(DepublishRecordId.class).count()); - - depublishRecordIdDao.deletePendingRecordIds(datasetId, setTest); - assertEquals(0, 
provider.getDatastore().find(DepublishRecordId.class).count()); - } - - - @Test - void countSuccessfullyDepublishedRecordIdsForDatasetTest() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1003"); - - depublishRecordIdDao - .addRecords(setTest, datasetId, DepublicationStatus.DEPUBLISHED, Instant.now()); - long result = depublishRecordIdDao.countSuccessfullyDepublishedRecordIdsForDataset(datasetId); - assertEquals(1L, result); - } - - @Test - void getDepublishRecordIdsTest() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1004", "1005"); - - depublishRecordIdDao - .addRecords(setTest, datasetId, DepublicationStatus.PENDING_DEPUBLICATION, Instant.now()); - List find1004 = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, "1004"); - - List findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - - assertEquals(1, find1004.size()); - assertEquals("1004", find1004.get(0).getRecordId()); - assertEquals(2, findAll.size()); - } - - @Test - void getAllDepublishRecordIdsWithStatusTest() throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set setTest = Set.of("1"); - final Set biggerThanAllowedSet = IntStream - .range(0, (int) MAX_DEPUBLISH_RECORD_IDS_PER_DATASET + 1).mapToObj(Integer::toString) - .collect(Collectors.toSet()); - - //Big list should fail - assertThrows(BadContentException.class, - () -> depublishRecordIdDao.getAllDepublishRecordIdsWithStatus(datasetId, - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.DEPUBLISHED, biggerThanAllowedSet)); - - depublishRecordIdDao - .addRecords(setTest, datasetId, DepublicationStatus.DEPUBLISHED, Instant.now()); - Set result = 
depublishRecordIdDao.getAllDepublishRecordIdsWithStatus(datasetId, - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.DEPUBLISHED, setTest); - - assertEquals(1, result.size()); - - //Check also when requesting without recordIds set parameter - result = depublishRecordIdDao.getAllDepublishRecordIdsWithStatus(datasetId, - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.DEPUBLISHED); - - assertEquals(1, result.size()); - } - - @Test - void markRecordIdsWithDepublicationStatus_wrong_parametersTest() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set recordIdsSet = Set.of("1", "2"); - Date date = Date.from(Instant.now()); - - //Null depublication status - assertThrows(IllegalArgumentException.class, () -> depublishRecordIdDao - .markRecordIdsWithDepublicationStatus(datasetId, recordIdsSet, null, date)); - - //Blank dataset id - assertThrows(IllegalArgumentException.class, () -> depublishRecordIdDao - .markRecordIdsWithDepublicationStatus(null, recordIdsSet, - DepublicationStatus.PENDING_DEPUBLICATION, date)); - - //Depublished status but date null - assertThrows(IllegalArgumentException.class, () -> depublishRecordIdDao - .markRecordIdsWithDepublicationStatus(datasetId, recordIdsSet, - DepublicationStatus.DEPUBLISHED, null)); - } - - @Test - void markRecordIdsWithDepublicationStatus_all_recordIds_set_depublished_and_then_pendingTest() - throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set recordIdsSet = Set.of("1", "2"); - Date date = Date.from(Instant.now()); - - //Create recordIds - depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, recordIdsSet); - //Check stored recordIds - List findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - 
assertTrue(findAll.stream().allMatch(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.PENDING == depublishRecordIdView - .getDepublicationStatus() && null == depublishRecordIdView.getDepublicationDate())); - //Set to DEPUBLISHED - depublishRecordIdDao - .markRecordIdsWithDepublicationStatus(datasetId, null, DepublicationStatus.DEPUBLISHED, - date); - //Check stored recordIds - findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - assertTrue(findAll.stream().allMatch(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.DEPUBLISHED == depublishRecordIdView - .getDepublicationStatus() && date - .equals(Date.from(depublishRecordIdView.getDepublicationDate())))); - //Set to PENDING_DEPUBLICATION - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, null, - DepublicationStatus.PENDING_DEPUBLICATION, date); - //Check stored recordIds - findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - assertTrue(findAll.stream().allMatch(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.PENDING == depublishRecordIdView - .getDepublicationStatus() && null == depublishRecordIdView.getDepublicationDate())); - } - - @Test - void markRecordIdsWithDepublicationStatus_specified_recordIds_set_depublished_and_then_pendingTest() - throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set recordIdsToCreate = Set.of("1", "2", "3"); - final Set recordIdsToUpdate = Set.of("1", "2"); - Date date = Date.from(Instant.now()); - - //Create recordIds - depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, recordIdsToCreate); - //Check stored recordIds - List findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, 
DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - assertTrue(findAll.stream().allMatch(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.PENDING == depublishRecordIdView - .getDepublicationStatus() && null == depublishRecordIdView.getDepublicationDate())); - //Set to DEPUBLISHED - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, recordIdsToUpdate, - DepublicationStatus.DEPUBLISHED, date); - //Check stored recordIds - findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - assertEquals(2, findAll.stream().filter(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.DEPUBLISHED == depublishRecordIdView - .getDepublicationStatus() && date - .equals(Date.from(depublishRecordIdView.getDepublicationDate()))).count()); - //Set to PENDING_DEPUBLICATION - depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, recordIdsToUpdate, - DepublicationStatus.PENDING_DEPUBLICATION, date); - //Check stored recordIds - findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - assertEquals(3, findAll.stream().filter(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.PENDING == depublishRecordIdView - .getDepublicationStatus() && null == depublishRecordIdView.getDepublicationDate()) - .count()); - } - - @Test - void markRecordIdsWithDepublicationStatus_depublish_non_already_existing_recordIdsTest() - throws BadContentException { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set recordIdsToCreate = Set.of("1", "2", "3"); - final Set recordIdsToUpdate = Set.of("4", "5"); - Date date = Date.from(Instant.now()); - - //Create recordIds - depublishRecordIdDao.createRecordIdsToBeDepublished(datasetId, recordIdsToCreate); - //Set to DEPUBLISHED - 
depublishRecordIdDao.markRecordIdsWithDepublicationStatus(datasetId, recordIdsToUpdate, - DepublicationStatus.DEPUBLISHED, date); - - //Check stored recordIds - List findAll = depublishRecordIdDao - .getDepublishRecordIds(datasetId, 0, DepublishRecordIdSortField.DEPUBLICATION_STATE, - SortDirection.ASCENDING, null); - final long pendingCount = findAll.stream().filter(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.PENDING == depublishRecordIdView - .getDepublicationStatus() && null == depublishRecordIdView.getDepublicationDate()) - .count(); - final long depublishedCount = findAll.stream().filter(depublishRecordIdView -> - DepublishRecordIdView.DepublicationStatus.DEPUBLISHED == depublishRecordIdView - .getDepublicationStatus() && date - .equals(Date.from(depublishRecordIdView.getDepublicationDate()))).count(); - assertEquals(3, pendingCount); - assertEquals(2, depublishedCount); - assertEquals(recordIdsToCreate.size() + recordIdsToUpdate.size(), - pendingCount + depublishedCount); - } - - @Test - void getPageSizeTest() { - final DepublishRecordIdDao depublishRecordIdDao = new DepublishRecordIdDao(provider, - MAX_DEPUBLISH_RECORD_IDS_PER_DATASET, 3); - verifyNoInteractions(provider); - assertEquals(3, depublishRecordIdDao.getPageSize()); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestScheduledWorkflowDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestScheduledWorkflowDao.java deleted file mode 100644 index 8f3808f9e1..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestScheduledWorkflowDao.java +++ /dev/null @@ -1,225 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import 
com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; -import dev.morphia.DeleteOptions; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.concurrent.ThreadLocalRandom; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-06 - */ -class TestScheduledWorkflowDao { - - private static ScheduledWorkflowDao scheduledWorkflowDao; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static MorphiaDatastoreProviderImpl provider; - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - MongoClient mongoClient = MongoClients - .create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = new MorphiaDatastoreProviderImpl(mongoClient, "test"); - - scheduledWorkflowDao = new ScheduledWorkflowDao(provider); - scheduledWorkflowDao.setScheduledWorkflowPerRequest(5); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - @AfterEach - void cleanUp() { - Datastore datastore = provider.getDatastore(); - datastore.find(ScheduledWorkflow.class).delete(new DeleteOptions().multi(true)); - } - - @Test - void createScheduledUserWorkflow() { - ScheduledWorkflow scheduledWorkflow = 
TestObjectFactory - .createScheduledWorkflowObject(); - String objectId = scheduledWorkflowDao.create(scheduledWorkflow).getId().toString(); - assertNotNull(objectId); - } - - @Test - void updateScheduledUserWorkflow() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - Date updatedPointerDate = new Date(); - scheduledWorkflow.setPointerDate(updatedPointerDate); - scheduledWorkflow.setScheduleFrequence(ScheduleFrequence.MONTHLY); - String objectId = scheduledWorkflowDao.update(scheduledWorkflow); - ScheduledWorkflow updatedScheduledWorkflow = scheduledWorkflowDao.getById(objectId); - assertEquals(ScheduleFrequence.MONTHLY, - updatedScheduledWorkflow.getScheduleFrequence()); - assertEquals(0, updatedPointerDate.compareTo(updatedScheduledWorkflow.getPointerDate())); - } - - @Test - void getById() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - String objectId = scheduledWorkflowDao.create(scheduledWorkflow).getId().toString(); - ScheduledWorkflow retrievedScheduledWorkflow = scheduledWorkflowDao - .getById(objectId); - assertEquals(scheduledWorkflow.getDatasetId(), - retrievedScheduledWorkflow.getDatasetId()); - assertEquals(scheduledWorkflow.getScheduleFrequence(), - retrievedScheduledWorkflow.getScheduleFrequence()); - assertEquals(scheduledWorkflow.getWorkflowPriority(), - retrievedScheduledWorkflow.getWorkflowPriority()); - assertEquals(0, scheduledWorkflow.getPointerDate() - .compareTo(retrievedScheduledWorkflow.getPointerDate())); - } - - @Test - void delete() { - assertFalse(scheduledWorkflowDao.delete(null)); - } - - @Test - void getScheduledUserWorkflow() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - ScheduledWorkflow retrievedScheduledWorkflow = scheduledWorkflowDao - 
.getScheduledWorkflow(scheduledWorkflow.getDatasetId()); - assertEquals(scheduledWorkflow.getScheduleFrequence(), - retrievedScheduledWorkflow.getScheduleFrequence()); - assertEquals(0, scheduledWorkflow.getPointerDate() - .compareTo(retrievedScheduledWorkflow.getPointerDate())); - } - - @Test - void getScheduledUserWorkflowByDatasetName() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - ScheduledWorkflow retrievedScheduledWorkflow = scheduledWorkflowDao - .getScheduledWorkflowByDatasetId(scheduledWorkflow.getDatasetId()); - assertEquals(scheduledWorkflow.getScheduleFrequence(), - retrievedScheduledWorkflow.getScheduleFrequence()); - assertEquals(0, scheduledWorkflow.getPointerDate() - .compareTo(retrievedScheduledWorkflow.getPointerDate())); - } - - @Test - void exists() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - assertTrue(scheduledWorkflowDao.exists(scheduledWorkflow)); - } - - @Test - void existsForDatasetName() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - assertNotNull( - scheduledWorkflowDao.existsForDatasetId(scheduledWorkflow.getDatasetId())); - } - - @Test - void deleteScheduledUserWorkflow() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - assertTrue( - scheduledWorkflowDao.deleteScheduledWorkflow(scheduledWorkflow.getDatasetId())); - } - - @Test - void deleteAllByDatasetId() { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflowDao.create(scheduledWorkflow); - assertTrue( - scheduledWorkflowDao.deleteAllByDatasetId(scheduledWorkflow.getDatasetId())); - } - - @Test - void 
getAllScheduledUserWorkflows() { - int scheduledUserWorkflowToCreate = - scheduledWorkflowDao.getScheduledWorkflowPerRequest() + 1; - for (int i = 0; i < scheduledUserWorkflowToCreate; i++) { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + i)); - scheduledWorkflowDao.create(scheduledWorkflow); - } - int nextPage = 0; - int allScheduledUserWorkflowsCount = 0; - do { - ResponseListWrapper scheduledUserWorkflowResponseListWrapper = new ResponseListWrapper<>(); - scheduledUserWorkflowResponseListWrapper.setResultsAndLastPage( - scheduledWorkflowDao.getAllScheduledWorkflows(ScheduleFrequence.ONCE, nextPage), - scheduledWorkflowDao.getScheduledWorkflowPerRequest(), nextPage); - allScheduledUserWorkflowsCount += scheduledUserWorkflowResponseListWrapper.getListSize(); - nextPage = scheduledUserWorkflowResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(scheduledUserWorkflowToCreate, allScheduledUserWorkflowsCount); - } - - @Test - void getAllScheduledUserWorkflowsByDateRangeONCE() { - int minutesRange = 10; - LocalDateTime lowerBound = LocalDateTime.now(); - LocalDateTime upperBound = lowerBound.plusMinutes(minutesRange); - - int scheduledUserWorkflowToCreate = - scheduledWorkflowDao.getScheduledWorkflowPerRequest() + 1; - for (int i = 0; i < scheduledUserWorkflowToCreate; i++) { - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setDatasetId(Integer.toString(TestObjectFactory.DATASETID + i)); - int plusMinutes = ThreadLocalRandom.current().nextInt(1, minutesRange); - Date pointerDate = Date - .from(lowerBound.plusMinutes(plusMinutes).atZone(ZoneId.systemDefault()).toInstant()); - scheduledWorkflow.setPointerDate(pointerDate); - scheduledWorkflowDao.create(scheduledWorkflow); - } - int nextPage = 0; - int allScheduledUserWorkflowsCount = 0; - do { - 
ResponseListWrapper scheduledUserWorkflowResponseListWrapper = new ResponseListWrapper<>(); - scheduledUserWorkflowResponseListWrapper.setResultsAndLastPage( - scheduledWorkflowDao - .getAllScheduledWorkflowsByDateRangeONCE(lowerBound, upperBound, nextPage), - scheduledWorkflowDao.getScheduledWorkflowPerRequest(), nextPage); - allScheduledUserWorkflowsCount += scheduledUserWorkflowResponseListWrapper.getListSize(); - nextPage = scheduledUserWorkflowResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(scheduledUserWorkflowToCreate, allScheduledUserWorkflowsCount); - } - -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowDao.java deleted file mode 100644 index e3174d1e6d..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowDao.java +++ /dev/null @@ -1,122 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; -import dev.morphia.DeleteOptions; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.util.List; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - 
* @since 2017-10-04 - */ -class TestWorkflowDao { - - private static WorkflowDao workflowDao; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static MorphiaDatastoreProviderImpl provider; - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - MongoClient mongoClient = MongoClients - .create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = new MorphiaDatastoreProviderImpl(mongoClient, "test"); - - workflowDao = new WorkflowDao(provider); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - @AfterEach - void cleanUp() { - Datastore datastore = provider.getDatastore(); - datastore.find(Workflow.class).delete(new DeleteOptions().multi(true)); - } - - @Test - void createUserWorkflow() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - String objectId = workflowDao.create(workflow).getId().toString(); - assertNotNull(objectId); - } - - @Test - void testUpdateUserWorkflow() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflowDao.create(workflow); - workflow.setMetisPluginsMetadata(null); - String objectId = workflowDao.update(workflow); - assertNotNull(objectId); - Workflow updatedWorkflow = workflowDao.getById(objectId); - assertEquals(0, updatedWorkflow.getMetisPluginsMetadata().size()); - } - - @Test - void getById() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - String objectId = workflowDao.create(workflow).getId().toString(); - Workflow retrievedWorkflow = workflowDao.getById(objectId); - assertEquals(workflow.getDatasetId(), retrievedWorkflow.getDatasetId()); - - List metisPluginsMetadata = workflow - .getMetisPluginsMetadata(); - List retrievedUserWorkflowMetisPluginsMetadata = retrievedWorkflow - .getMetisPluginsMetadata(); - 
assertEquals(metisPluginsMetadata.size(), - retrievedUserWorkflowMetisPluginsMetadata.size()); - assertEquals(retrievedUserWorkflowMetisPluginsMetadata.get(0).getPluginType(), - metisPluginsMetadata.get(0).getPluginType()); - } - - @Test - void delete() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflowDao.create(workflow); - assertTrue(workflowDao.delete(workflow)); - assertFalse(workflowDao.delete(workflow)); - } - - @Test - void deleteUserWorkflow() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflowDao.create(workflow); - assertTrue(workflowDao.deleteWorkflow(workflow.getDatasetId())); - assertFalse(workflowDao.deleteWorkflow(workflow.getDatasetId())); - } - - @Test - void exists() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflowDao.create(workflow); - assertTrue(workflowDao.workflowExistsForDataset(workflow.getDatasetId())); - assertFalse(workflowDao.workflowExistsForDataset(workflow.getDatasetId() + "X")); - } - - @Test - void getUserWorkflow() { - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflowDao.create(workflow); - assertNotNull(workflowDao.getWorkflow(workflow.getDatasetId())); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowExecutionDao.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowExecutionDao.java deleted file mode 100644 index 206813a00a..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowExecutionDao.java +++ /dev/null @@ -1,734 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static 
org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; -import dev.morphia.DeleteOptions; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.mongo.MorphiaDatastoreProviderImpl; -import eu.europeana.metis.core.rest.ResponseListWrapper; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.SystemId; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import 
eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-04 - */ -class TestWorkflowExecutionDao { - - private static WorkflowExecutionDao workflowExecutionDao; - private static EmbeddedLocalhostMongo embeddedLocalhostMongo; - private static MorphiaDatastoreProviderImpl provider; - - @BeforeAll - static void prepare() { - embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - MongoClient mongoClient = MongoClients - .create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - provider = new MorphiaDatastoreProviderImpl(mongoClient, "test"); - - workflowExecutionDao = spy(new WorkflowExecutionDao(provider)); - } - - @BeforeEach - void setup() { - workflowExecutionDao.setWorkflowExecutionsPerRequest(5); - workflowExecutionDao.setMaxServedExecutionListLength(10); - } - - @AfterAll - static void destroy() { - embeddedLocalhostMongo.stop(); - } - - @AfterEach - void cleanUp() { - Datastore datastore = provider.getDatastore(); - datastore.find(WorkflowExecution.class).delete(new DeleteOptions().multi(true)); - reset(workflowExecutionDao); - } - - @Test - void createUserWorkflowExecution() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - String objectId = 
workflowExecutionDao.create(workflowExecution).getId().toString(); - assertNotNull(objectId); - } - - @Test - void updateUserWorkflowExecution() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionDao.create(workflowExecution); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - Date updatedDate = new Date(); - workflowExecution.setUpdatedDate(updatedDate); - String objectId = workflowExecutionDao.update(workflowExecution); - assertNotNull(objectId); - WorkflowExecution updatedWorkflowExecution = workflowExecutionDao.getById(objectId); - assertEquals(WorkflowStatus.RUNNING, updatedWorkflowExecution.getWorkflowStatus()); - assertEquals(0, updatedDate.compareTo(updatedWorkflowExecution.getUpdatedDate())); - } - - @Test - void updateWorkflowPlugins() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - assertEquals(PluginStatus.INQUEUE, - workflowExecution.getMetisPlugins().get(0).getPluginStatus()); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - workflowExecution.getMetisPlugins().get(0).setPluginStatus(PluginStatus.RUNNING); - workflowExecutionDao.updateWorkflowPlugins(workflowExecution); - WorkflowExecution updatedWorkflowExecution = workflowExecutionDao.getById(objectId); - assertEquals(PluginStatus.RUNNING, - updatedWorkflowExecution.getMetisPlugins().get(0).getPluginStatus()); - } - - @Test - void updateMonitorInformation() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - Date createdDate = new Date(); - workflowExecution.setCreatedDate(createdDate); - assertEquals(PluginStatus.INQUEUE, - workflowExecution.getMetisPlugins().get(0).getPluginStatus()); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - Date startedDate = new Date(); - 
workflowExecution.setStartedDate(startedDate); - workflowExecution.setUpdatedDate(startedDate); - workflowExecution.getMetisPlugins().get(0).setPluginStatus(PluginStatus.RUNNING); - Date pluginUpdatedDate = new Date(); - if (workflowExecution.getMetisPlugins().get(0) instanceof AbstractExecutablePlugin) { - workflowExecution.getMetisPlugins().get(0).setUpdatedDate(pluginUpdatedDate); - } - workflowExecutionDao.updateMonitorInformation(workflowExecution); - WorkflowExecution updatedWorkflowExecution = workflowExecutionDao.getById(objectId); - assertEquals(WorkflowStatus.RUNNING, updatedWorkflowExecution.getWorkflowStatus()); - assertEquals(0, createdDate.compareTo(updatedWorkflowExecution.getCreatedDate())); - assertEquals(0, startedDate.compareTo(updatedWorkflowExecution.getStartedDate())); - assertEquals(0, startedDate.compareTo(updatedWorkflowExecution.getUpdatedDate())); - assertEquals(PluginStatus.RUNNING, - updatedWorkflowExecution.getMetisPlugins().get(0).getPluginStatus()); - if (workflowExecution.getMetisPlugins().get(0) instanceof AbstractExecutablePlugin) { - assertEquals(0, pluginUpdatedDate.compareTo( - updatedWorkflowExecution.getMetisPlugins().get(0).getUpdatedDate())); - } - } - - @Test - void testSetCancellingState() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - workflowExecutionDao.setCancellingState(workflowExecution, null); - WorkflowExecution cancellingWorkflowExecution = workflowExecutionDao - .getById(objectId); - assertTrue(cancellingWorkflowExecution.isCancelling()); - assertEquals(SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name(), - cancellingWorkflowExecution.getCancelledBy()); - } - - @Test - void getById() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - assertFalse(workflowExecution.isCancelling()); - String objectId = 
workflowExecutionDao.create(workflowExecution).getId().toString(); - WorkflowExecution retrievedWorkflowExecution = workflowExecutionDao - .getById(objectId); - assertEquals(workflowExecution.getCreatedDate(), - retrievedWorkflowExecution.getCreatedDate()); - assertEquals(workflowExecution.getDatasetId(), - retrievedWorkflowExecution.getDatasetId()); - assertEquals(workflowExecution.getWorkflowPriority(), - retrievedWorkflowExecution.getWorkflowPriority()); - assertFalse(retrievedWorkflowExecution.isCancelling()); - assertEquals(workflowExecution.getMetisPlugins().get(0).getPluginType(), - retrievedWorkflowExecution.getMetisPlugins().get(0).getPluginType()); - } - - @Test - void delete() { - assertFalse(workflowExecutionDao.delete(null)); - } - - @Test - void getRunningOrInQueueExecution() { - WorkflowExecution workflowExecutionRunning = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionRunning.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecutionDao.create(workflowExecutionRunning); - WorkflowExecution runningOrInQueueExecution = workflowExecutionDao - .getRunningOrInQueueExecution(workflowExecutionRunning.getDatasetId()); - assertEquals(WorkflowStatus.RUNNING, runningOrInQueueExecution.getWorkflowStatus()); - } - - @Test - void exists() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionDao.create(workflowExecution); - assertTrue(workflowExecutionDao.exists(workflowExecution)); - } - - @Test - void existsAndNotCompleted() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - assertEquals(objectId, workflowExecutionDao - .existsAndNotCompleted(workflowExecution.getDatasetId())); - } - - @Test - void existsAndNotCompletedReturnNull() { - WorkflowExecution workflowExecution = 
TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecutionDao.create(workflowExecution); - assertNull( - workflowExecutionDao.existsAndNotCompleted(workflowExecution.getDatasetId())); - } - - @Test - void getFirstOrLastFinishedPlugin_CheckFirstAndLast() { - - WorkflowExecution workflowExecutionFirst = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecutionFirst.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecutionFirst.setDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - - WorkflowExecution workflowExecutionSecond = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecutionSecond.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecutionSecond.setDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - for (int i = 0; i < workflowExecutionSecond.getMetisPlugins().size(); i++) { - workflowExecutionFirst.getMetisPlugins().get(i).setFinishedDate(new Date()); - workflowExecutionSecond.getMetisPlugins().get(i).setFinishedDate( - new Date( - workflowExecutionFirst.getMetisPlugins().get(i).getFinishedDate().getTime() + 1000)); - workflowExecutionFirst.getMetisPlugins().get(i).setPluginStatus(PluginStatus.FINISHED); - workflowExecutionSecond.getMetisPlugins().get(i).setPluginStatus(PluginStatus.FINISHED); - } - - final String executionFirstId = workflowExecutionDao.create(workflowExecutionFirst).getId().toString(); - final String executionSecondId = workflowExecutionDao.create(workflowExecutionSecond).getId().toString(); - - PluginWithExecutionId latestFinishedWorkflowExecution = workflowExecutionDao - .getFirstOrLastFinishedPlugin(Integer.toString(TestObjectFactory.DATASETID), - EnumSet.of(PluginType.OAIPMH_HARVEST), false); - assertEquals(latestFinishedWorkflowExecution.getPlugin().getFinishedDate(), - workflowExecutionSecond.getMetisPlugins().get(0).getFinishedDate()); - assertEquals(executionSecondId, 
latestFinishedWorkflowExecution.getExecutionId()); - - PluginWithExecutionId firstFinishedWorkflowExecution = workflowExecutionDao - .getFirstOrLastFinishedPlugin(Integer.toString(TestObjectFactory.DATASETID), - EnumSet.of(PluginType.OAIPMH_HARVEST), true); - assertEquals(firstFinishedWorkflowExecution.getPlugin().getFinishedDate(), - workflowExecutionFirst.getMetisPlugins().get(0).getFinishedDate()); - assertEquals(executionFirstId, firstFinishedWorkflowExecution.getExecutionId()); - } - - @Test - void getFirstOrLastFinishedPlugin_isNull() { - PluginWithExecutionId latestFinishedWorkflowExecution = workflowExecutionDao - .getFirstOrLastFinishedPlugin(Integer.toString(TestObjectFactory.DATASETID), - EnumSet.of(PluginType.OAIPMH_HARVEST), false); - assertNull(latestFinishedWorkflowExecution); - } - - @Test - void getFirstOrLastFinishedPlugin_invalidPluginTypes() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - assertThrows(IllegalArgumentException.class, - () -> workflowExecutionDao.getFirstOrLastFinishedPlugin(datasetId, null, true)); - assertThrows(IllegalArgumentException.class, () -> workflowExecutionDao - .getFirstOrLastFinishedPlugin(datasetId, Collections.emptySet(), false)); - final Set setWithNull = new HashSet<>(); - setWithNull.add(null); - setWithNull.add(PluginType.OAIPMH_HARVEST); - assertThrows(IllegalArgumentException.class, - () -> workflowExecutionDao.getFirstOrLastFinishedPlugin(datasetId, setWithNull, true)); - } - - private static class NonExecutableEnrichmentPlugin extends - AbstractMetisPlugin { - - NonExecutableEnrichmentPlugin() { - super(PluginType.ENRICHMENT); - } - } - - @Test - void getLatestSuccessfulExecutablePlugin_CheckExecutable() { - - // Create executable harvest and non-executable enrichment plugin - final AbstractMetisPlugin nonExecutableEnrichment = new NonExecutableEnrichmentPlugin(); - final AbstractMetisPlugin executableHarvest = ExecutablePluginFactory - .createPlugin(new 
OaipmhHarvestPluginMetadata()); - - // Mock the dependent method - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doReturn(new PluginWithExecutionId("", nonExecutableEnrichment)) - .when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.ENRICHMENT), false); - doReturn(new PluginWithExecutionId("", executableHarvest)) - .when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.OAIPMH_HARVEST), false); - doReturn(null).when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.NORMALIZATION), false); - - // Check that the enrichment IS NOT returned by the method. - assertNull(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.ENRICHMENT), false)); - - // Check that the harvesting IS returned by the method. - assertSame(executableHarvest, - workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.OAIPMH_HARVEST), false).getPlugin()); - - // Check that the normalization IS NOT returned by the method. 
- assertNull(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.NORMALIZATION), false)); - } - - @Test - void getLatestSuccessfulExecutablePlugin_CheckDataStatuses() { - - // Create harvesting plugin with default status - final AbstractExecutablePlugin defaultPlugin = ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - defaultPlugin.setDataStatus(null); - - // Create transformation plugin with valid status - final AbstractExecutablePlugin validPlugin = ExecutablePluginFactory - .createPlugin(new TransformationPluginMetadata()); - validPlugin.setDataStatus(DataStatus.VALID); - - // Create unreachable enrichment plugin with valid status - final AbstractExecutablePlugin unreachablePlugin = ExecutablePluginFactory - .createPlugin(new EnrichmentPluginMetadata()); - unreachablePlugin.setDataStatus(DataStatus.VALID); - - // Create enrichment plugin with deprecated status - final AbstractExecutablePlugin deprecatedPlugin = ExecutablePluginFactory - .createPlugin(new EnrichmentPluginMetadata()); - deprecatedPlugin.setDataStatus(DataStatus.DEPRECATED); - - // Mock the dependent method - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doReturn(new PluginWithExecutionId("", defaultPlugin)).when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.OAIPMH_HARVEST), false); - doReturn(new PluginWithExecutionId("", validPlugin)).when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.TRANSFORMATION), false); - doReturn(new PluginWithExecutionId("", deprecatedPlugin)) - .when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, EnumSet.of(PluginType.ENRICHMENT), false); - - // Try to find the default plugin - assertSame(defaultPlugin, workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.OAIPMH_HARVEST), false).getPlugin()); - assertSame(defaultPlugin, 
workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.OAIPMH_HARVEST), true).getPlugin()); - - // Try to find the valid plugin - assertSame(validPlugin, workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.TRANSFORMATION), false).getPlugin()); - assertSame(validPlugin, workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.TRANSFORMATION), true).getPlugin()); - - // Try to find the deprecated plugin - assertSame(deprecatedPlugin, workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.ENRICHMENT), false).getPlugin()); - assertNull(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.ENRICHMENT), true)); - } - - @Test - void getLatestSuccessfulExecutablePlugin_invalidPluginTypes() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - assertThrows(IllegalArgumentException.class, - () -> workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, null, true)); - assertThrows(IllegalArgumentException.class, () -> workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(datasetId, Collections.emptySet(), false)); - final Set setWithNull = new HashSet<>(); - setWithNull.add(null); - setWithNull.add(ExecutablePluginType.OAIPMH_HARVEST); - assertThrows(IllegalArgumentException.class, () -> workflowExecutionDao - .getLatestSuccessfulExecutablePlugin(datasetId, setWithNull, true)); - } - - @Test - void getFirstSuccessfulPlugin() { - - // Set up the mock - final AbstractExecutablePlugin plugin = ExecutablePluginFactory - .createPlugin(new MediaProcessPluginMetadata()); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set pluginTypes = EnumSet.of(PluginType.ENRICHMENT, PluginType.MEDIA_PROCESS); - final PluginWithExecutionId pluginWithExecutionId = new 
PluginWithExecutionId<>( - "", plugin); - doReturn(pluginWithExecutionId).when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, pluginTypes, true); - - // Check the call - assertSame(pluginWithExecutionId, - workflowExecutionDao.getFirstSuccessfulPlugin(datasetId, pluginTypes)); - verify(workflowExecutionDao, times(1)) - .getFirstOrLastFinishedPlugin(datasetId, pluginTypes, true); - verify(workflowExecutionDao, times(1)) - .getFirstOrLastFinishedPlugin(anyString(), any(), anyBoolean()); - } - - @Test - void getLatestSuccessfulPlugin() { - - // Set up the mock - final AbstractExecutablePlugin plugin = ExecutablePluginFactory - .createPlugin(new MediaProcessPluginMetadata()); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set pluginTypes = EnumSet.of(PluginType.ENRICHMENT, PluginType.MEDIA_PROCESS); - final PluginWithExecutionId pluginWithExecutionId = new PluginWithExecutionId<>( - "", plugin); - doReturn(pluginWithExecutionId).when(workflowExecutionDao) - .getFirstOrLastFinishedPlugin(datasetId, pluginTypes, false); - - // Check the call - assertSame(pluginWithExecutionId, - workflowExecutionDao.getLatestSuccessfulPlugin(datasetId, pluginTypes)); - verify(workflowExecutionDao, times(1)) - .getFirstOrLastFinishedPlugin(datasetId, pluginTypes, false); - verify(workflowExecutionDao, times(1)) - .getFirstOrLastFinishedPlugin(anyString(), any(), anyBoolean()); - } - - @Test - void getWorkflowExecutionByExecutionId() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - WorkflowExecution runningWorkflowExecution = workflowExecutionDao - .getById(workflowExecution.getId().toString()); - assertEquals(objectId, runningWorkflowExecution.getId().toString()); - } - - @Test - void getAllUserWorkflowExecutions() { - int 
userWorkflowExecutionsToCreate = - workflowExecutionDao.getMaxServedExecutionListLength() + 1; - for (int i = 0; i < userWorkflowExecutionsToCreate; i++) { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionDao.create(workflowExecution); - } - HashSet workflowStatuses = new HashSet<>(); - workflowStatuses.add(WorkflowStatus.INQUEUE); - int nextPage = 0; - int allUserWorkflowsExecutionsCount = 0; - do { - ResponseListWrapper userWorkflowExecutionResponseListWrapper = new ResponseListWrapper<>(); - final ResultList result = workflowExecutionDao.getAllWorkflowExecutions( - Collections.singleton(Integer.toString(TestObjectFactory.DATASETID)), workflowStatuses, - DaoFieldNames.ID, false, nextPage, 1, true); - assertFalse(result.isMaxResultCountReached()); - userWorkflowExecutionResponseListWrapper.setResultsAndLastPage(result.getResults(), - workflowExecutionDao.getWorkflowExecutionsPerRequest(), nextPage, - result.isMaxResultCountReached()); - allUserWorkflowsExecutionsCount += userWorkflowExecutionResponseListWrapper.getListSize(); - nextPage = userWorkflowExecutionResponseListWrapper.getNextPage(); - } while (nextPage != -1); - - assertEquals(userWorkflowExecutionsToCreate, allUserWorkflowsExecutionsCount); - } - - @Test - void getAllUserWorkflowExecutionsAscending() { - int userWorkflowExecutionsToCreate = - workflowExecutionDao.getMaxServedExecutionListLength() + 1; - for (int i = 0; i < userWorkflowExecutionsToCreate; i++) { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setCreatedDate(new Date(1000 * i)); - workflowExecutionDao.create(workflowExecution); - } - HashSet workflowStatuses = new HashSet<>(); - workflowStatuses.add(WorkflowStatus.INQUEUE); - int nextPage = 0; - int allUserWorkflowsExecutionsCount = 0; - do { - ResponseListWrapper userWorkflowExecutionResponseListWrapper = new ResponseListWrapper<>(); - final ResultList result 
= workflowExecutionDao.getAllWorkflowExecutions( - Collections.singleton(Integer.toString(TestObjectFactory.DATASETID)), - workflowStatuses, DaoFieldNames.CREATED_DATE, true, nextPage, 1, false); - userWorkflowExecutionResponseListWrapper.setResultsAndLastPage(result.getResults(), - workflowExecutionDao.getWorkflowExecutionsPerRequest(), nextPage, - result.isMaxResultCountReached()); - if (!result.getResults().isEmpty()) { - WorkflowExecution beforeWorkflowExecution = - userWorkflowExecutionResponseListWrapper.getResults().get(0); - for (int i = 1; i < userWorkflowExecutionResponseListWrapper.getListSize(); i++) { - WorkflowExecution afterWorkflowExecution = - userWorkflowExecutionResponseListWrapper.getResults().get(i); - assertTrue(beforeWorkflowExecution.getCreatedDate() - .before(afterWorkflowExecution.getCreatedDate())); - beforeWorkflowExecution = afterWorkflowExecution; - } - } - allUserWorkflowsExecutionsCount += userWorkflowExecutionResponseListWrapper.getListSize(); - nextPage = userWorkflowExecutionResponseListWrapper.getNextPage(); - - final boolean hasAll = - allUserWorkflowsExecutionsCount == workflowExecutionDao.getMaxServedExecutionListLength(); - assertEquals(hasAll, result.isMaxResultCountReached()); - } while (nextPage != -1); - - assertEquals(workflowExecutionDao.getMaxServedExecutionListLength(), - allUserWorkflowsExecutionsCount); - } - - @Test - void isCancelled() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setWorkflowStatus(WorkflowStatus.CANCELLED); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - assertTrue(workflowExecutionDao.isCancelled(new ObjectId(objectId))); - } - - @Test - void isCancelling() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecution.setCancelling(true); - String objectId = workflowExecutionDao.create(workflowExecution).getId().toString(); - 
assertTrue(workflowExecutionDao.isCancelling(new ObjectId(objectId))); - } - - @Test - void deleteAllByDatasetId() { - WorkflowExecution workflowExecution = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionDao.create(workflowExecution); - assertTrue( - workflowExecutionDao.deleteAllByDatasetId(workflowExecution.getDatasetId())); - } - - @Test - void getWorkflowExecutionOverview() { - - final WorkflowExecution finishedOld = TestObjectFactory.createWorkflowExecutionObject(); - finishedOld.setWorkflowStatus(WorkflowStatus.FINISHED); - finishedOld.setCreatedDate(new Date(2)); - final String finishedOldId = workflowExecutionDao.create(finishedOld).getId().toString(); - - final WorkflowExecution cancelledOld = TestObjectFactory.createWorkflowExecutionObject(); - cancelledOld.setWorkflowStatus(WorkflowStatus.CANCELLED); - cancelledOld.setCreatedDate(new Date(1)); - final Date startedDateOfCancelledPlugin = new Date(10); - final List metisPlugins = cancelledOld.getMetisPlugins(); - metisPlugins.forEach(metisPlugin -> { - metisPlugin.setPluginStatus(PluginStatus.CANCELLED); - metisPlugin.setStartedDate(startedDateOfCancelledPlugin); - }); - final String cancelledOldId = workflowExecutionDao.create(cancelledOld).getId().toString(); - - final WorkflowExecution failedOld = TestObjectFactory.createWorkflowExecutionObject(); - failedOld.setWorkflowStatus(WorkflowStatus.FAILED); - failedOld.setCreatedDate(new Date(0)); - final String failedOldId = workflowExecutionDao.create(failedOld).getId().toString(); - - final WorkflowExecution finishedNew = TestObjectFactory.createWorkflowExecutionObject(); - finishedNew.setWorkflowStatus(WorkflowStatus.FINISHED); - finishedNew.setCreatedDate(new Date(1000)); - final String finishedNewId = workflowExecutionDao.create(finishedNew).getId().toString(); - - final WorkflowExecution runningOld = TestObjectFactory.createWorkflowExecutionObject(); - runningOld.setWorkflowStatus(WorkflowStatus.RUNNING); - 
runningOld.setCreatedDate(new Date(0)); - final String runningOldId = workflowExecutionDao.create(runningOld).getId().toString(); - - final WorkflowExecution runningNew = TestObjectFactory.createWorkflowExecutionObject(); - runningNew.setWorkflowStatus(WorkflowStatus.RUNNING); - runningNew.setCreatedDate(new Date(1000)); - final String runningNewId = workflowExecutionDao.create(runningNew).getId().toString(); - - final WorkflowExecution queuedOld = TestObjectFactory.createWorkflowExecutionObject(); - queuedOld.setWorkflowStatus(WorkflowStatus.INQUEUE); - queuedOld.setCreatedDate(new Date(0)); - final String queuedOldId = workflowExecutionDao.create(queuedOld).getId().toString(); - - final WorkflowExecution queuedNew = TestObjectFactory.createWorkflowExecutionObject(); - queuedNew.setWorkflowStatus(WorkflowStatus.INQUEUE); - queuedNew.setCreatedDate(new Date(1000)); - final String queuedNewId = workflowExecutionDao.create(queuedNew).getId().toString(); - - // Expected order - final List expectedOrder = Arrays - .asList(queuedNewId, queuedOldId, runningNewId, runningOldId, finishedNewId, finishedOldId, - cancelledOldId, failedOldId); - - // Try without filtering on dataset. - workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithoutFilter = workflowExecutionDao - .getWorkflowExecutionsOverview(null, null, null, null, null, 0, 1); - assertNotNull(resultWithoutFilter); - assertFalse(resultWithoutFilter.isMaxResultCountReached()); - final List actualOrderWithoutFilter = resultWithoutFilter.getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(expectedOrder, actualOrderWithoutFilter); - - // Try with empty dataset ids Set. 
- workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithEmptyDatasetIdsSet = workflowExecutionDao - .getWorkflowExecutionsOverview(Collections.emptySet(), null, null, null, null, 0, 1); - assertNotNull(resultWithEmptyDatasetIdsSet); - assertFalse(resultWithEmptyDatasetIdsSet.isMaxResultCountReached()); - final List actualOrderWithEmptyDatasetIdsSet = resultWithEmptyDatasetIdsSet.getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(expectedOrder, actualOrderWithEmptyDatasetIdsSet); - - // Try with filtering on dataset. - workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithFilter = workflowExecutionDao - .getWorkflowExecutionsOverview(Collections.singleton("" + TestObjectFactory.DATASETID), - null, null, null, null, 0, - 1); - assertNotNull(resultWithFilter); - assertFalse(resultWithFilter.isMaxResultCountReached()); - final List actualOrderWithFilter = resultWithFilter.getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(expectedOrder, actualOrderWithFilter); - - // Try with filtering on pluginStatuses and pluginTypes. 
- workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithFilterPlugin = workflowExecutionDao - .getWorkflowExecutionsOverview(null, - EnumSet.of(PluginStatus.CANCELLED), EnumSet.of(PluginType.OAIPMH_HARVEST), - startedDateOfCancelledPlugin, null, 0, 1); - assertNotNull(resultWithFilterPlugin); - assertFalse(resultWithFilterPlugin.isMaxResultCountReached()); - final List actualOrderWithFilterPlugin = resultWithFilterPlugin.getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(Collections.singletonList(cancelledOldId), actualOrderWithFilterPlugin); - assertEquals(2, - resultWithFilterPlugin.getResults().get(0).getExecution().getMetisPlugins().size()); - - // Try with filtering on pluginStatuses and pluginTypes that do not exist. - workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithFilterPluginNoItems = workflowExecutionDao - .getWorkflowExecutionsOverview(null, - EnumSet.of(PluginStatus.FINISHED), EnumSet.of(PluginType.OAIPMH_HARVEST), - null, null, 0, 1); - assertNotNull(resultWithFilterPluginNoItems); - assertFalse(resultWithFilterPluginNoItems.isMaxResultCountReached()); - final List actualOrderWithFilterPluginNoItems = resultWithFilterPluginNoItems - .getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(0, actualOrderWithFilterPluginNoItems.size()); - - // Try with filter on non-existing dataset. 
- workflowExecutionDao.setWorkflowExecutionsPerRequest(expectedOrder.size()); - final ResultList resultWithInvalidFilter = workflowExecutionDao - .getWorkflowExecutionsOverview( - Collections.singleton("" + (TestObjectFactory.DATASETID + 1)), null, null, null, null, - 0, 1); - assertNotNull(resultWithInvalidFilter); - assertFalse(resultWithInvalidFilter.isMaxResultCountReached()); - assertTrue(resultWithInvalidFilter.getResults().isEmpty()); - - // Try pagination - final int pageSize = 2; - final int pageNumber = 1; - final int pageCount = 2; - workflowExecutionDao.setWorkflowExecutionsPerRequest(pageSize); - final ResultList resultWithPaging = workflowExecutionDao - .getWorkflowExecutionsOverview(null, null, null, null, null, pageNumber, pageCount); - assertNotNull(resultWithPaging); - assertFalse(resultWithPaging.isMaxResultCountReached()); - final List actualOrderWithPaging = resultWithPaging.getResults().stream() - .map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).toList(); - assertEquals(expectedOrder.subList(pageSize * pageNumber, pageSize * (pageNumber + pageCount)), - actualOrderWithPaging); - - // Test the max limit for results get last full page - workflowExecutionDao.setMaxServedExecutionListLength(4); - final ResultList fullResultWithMaxServed = workflowExecutionDao - .getWorkflowExecutionsOverview(null, null, null, null, null, 1, 1); - assertNotNull(fullResultWithMaxServed); - assertTrue(fullResultWithMaxServed.isMaxResultCountReached()); - assertEquals(2, fullResultWithMaxServed.getResults().size()); - - // Test the max limit for results get last partial page - workflowExecutionDao.setMaxServedExecutionListLength(3); - final ResultList partialResultWithMaxServed = workflowExecutionDao - .getWorkflowExecutionsOverview(null, null, null, null, null, 1, 1); - assertNotNull(partialResultWithMaxServed); - assertTrue(partialResultWithMaxServed.isMaxResultCountReached()); - assertEquals(1, 
partialResultWithMaxServed.getResults().size()); - - // Test the max limit for results get first empty page - workflowExecutionDao.setMaxServedExecutionListLength(2); - final ResultList emptyResultWithMaxServed = workflowExecutionDao - .getWorkflowExecutionsOverview(null, null, null, null, null, 1, 1); - assertNotNull(emptyResultWithMaxServed); - assertTrue(emptyResultWithMaxServed.isMaxResultCountReached()); - assertTrue(emptyResultWithMaxServed.getResults().isEmpty()); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowValidationUtils.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowValidationUtils.java deleted file mode 100644 index 2a91d7cb4e..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/dao/TestWorkflowValidationUtils.java +++ /dev/null @@ -1,272 +0,0 @@ -package eu.europeana.metis.core.dao; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.dataset.DepublishRecordId.DepublicationStatus; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.utils.TestObjectFactory; -import 
eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.DepublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -public class TestWorkflowValidationUtils { - - private static final String DATASET_ID = Integer.toString(TestObjectFactory.DATASETID); - private static WorkflowValidationUtils validationUtils; - private static DepublishRecordIdDao depublishRecordIdDao; - private static DataEvolutionUtils dataEvolutionUtils; - - @BeforeAll - static void prepare() { - depublishRecordIdDao = mock(DepublishRecordIdDao.class); - dataEvolutionUtils = mock(DataEvolutionUtils.class); - validationUtils = spy(new WorkflowValidationUtils(depublishRecordIdDao, dataEvolutionUtils)); - } - - @AfterEach - void cleanUp() { - reset(validationUtils, depublishRecordIdDao, dataEvolutionUtils); - } - - @Test - void testValidateWorkflowPlugins_testWorkflowComposition() throws GenericMetisException { - - // Create successful predecessor - final ExecutablePluginType predecessorType = ExecutablePluginType.OAIPMH_HARVEST; - final AbstractExecutablePlugin predecessor = - 
ExecutablePluginFactory.createPlugin(new OaipmhHarvestPluginMetadata()); - predecessor.setExecutionProgress(new ExecutionProgress()); - predecessor.getExecutionProgress().setProcessedRecords(1); - predecessor.getExecutionProgress().setErrors(0); - doReturn(new PluginWithExecutionId<>("", predecessor)).when(dataEvolutionUtils) - .computePredecessorPlugin(any(), eq(predecessorType), eq(DATASET_ID)); - - // Test allowed workflow - assertSame(predecessor, validationUtils.validateWorkflowPlugins(createWorkflow( - ExecutablePluginType.OAIPMH_HARVEST), predecessorType).getPlugin()); - assertSame(predecessor, validationUtils.validateWorkflowPlugins(createWorkflow( - ExecutablePluginType.NORMALIZATION, ExecutablePluginType.ENRICHMENT, - ExecutablePluginType.LINK_CHECKING), predecessorType).getPlugin()); - assertSame(predecessor, validationUtils.validateWorkflowPlugins(createWorkflow( - ExecutablePluginType.ENRICHMENT, ExecutablePluginType.OAIPMH_HARVEST), predecessorType) - .getPlugin()); - - // Test workflow with empty list - assertThrows(BadContentException.class, () -> validationUtils - .validateWorkflowPlugins(createWorkflow(), predecessorType)); - - // Test workflow with null list - final Workflow workflowWithNullList = new Workflow(); - workflowWithNullList.setMetisPluginsMetadata(null); - assertThrows(BadContentException.class, () -> validationUtils - .validateWorkflowPlugins(workflowWithNullList, predecessorType)); - - // Test workflow with plugin with invalid type - assertThrows(BadContentException.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.NORMALIZATION, null, - ExecutablePluginType.LINK_CHECKING), predecessorType)); - - // Test workflow with two plugins, one of which is depublish - Workflow workflowDepublishAndOai = new Workflow(); - workflowDepublishAndOai.setDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - 
oaipmhHarvestPluginMetadata.setEnabled(true); - DepublishPluginMetadata depublishPluginMetadata = new DepublishPluginMetadata(); - depublishPluginMetadata.setEnabled(true); - depublishPluginMetadata.setDatasetDepublish(true); - List abstractMetisPluginMetadata = new ArrayList<>(2); - abstractMetisPluginMetadata.add(oaipmhHarvestPluginMetadata); - abstractMetisPluginMetadata.add(depublishPluginMetadata); - workflowDepublishAndOai.setMetisPluginsMetadata(abstractMetisPluginMetadata); - assertThrows(BadContentException.class, - () -> validationUtils.validateWorkflowPlugins(workflowDepublishAndOai, null)); - - // Test if workflow contains record depublish that record ids exist - Workflow workflowDepublish = new Workflow(); - workflowDepublish.setDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - depublishPluginMetadata.setDatasetDepublish(false); - abstractMetisPluginMetadata.clear(); - abstractMetisPluginMetadata.add(depublishPluginMetadata); - workflowDepublish.setMetisPluginsMetadata(abstractMetisPluginMetadata); - when(depublishRecordIdDao - .getAllDepublishRecordIdsWithStatus(workflowDepublish.getDatasetId(), - DepublishRecordIdSortField.DEPUBLICATION_STATE, SortDirection.ASCENDING, - DepublicationStatus.PENDING_DEPUBLICATION)).thenReturn(Collections.emptySet()); - assertThrows(BadContentException.class, () -> validationUtils - .validateWorkflowPlugins(workflowDepublish, null)); - - // Test workflow starting with link checking. 
- assertSame(predecessor, validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.LINK_CHECKING), predecessorType).getPlugin()); - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.LINK_CHECKING, ExecutablePluginType.TRANSFORMATION), - predecessorType)); - - // Test workflow with gaps - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.TRANSFORMATION, ExecutablePluginType.ENRICHMENT), - predecessorType)); - - // Test workflow with duplicate types - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.TRANSFORMATION, ExecutablePluginType.ENRICHMENT, - ExecutablePluginType.ENRICHMENT), predecessorType)); - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.TRANSFORMATION, ExecutablePluginType.LINK_CHECKING, - ExecutablePluginType.LINK_CHECKING), predecessorType)); - - // Test workflow with disabled plugins: valid before disabling, but invalid after. 
- final Workflow workflowWithDisabledPlugins = createWorkflow( - ExecutablePluginType.NORMALIZATION, - ExecutablePluginType.ENRICHMENT, ExecutablePluginType.MEDIA_PROCESS); - assertSame(predecessor, validationUtils.validateWorkflowPlugins(workflowWithDisabledPlugins, - predecessorType).getPlugin()); - when(workflowWithDisabledPlugins.getMetisPluginsMetadata().get(1).isEnabled()) - .thenReturn(false); - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - workflowWithDisabledPlugins, predecessorType)); - - // Test workflow with bad predecessor - doThrow(PluginExecutionNotAllowed.class).when(dataEvolutionUtils) - .computePredecessorPlugin(any(), eq(predecessorType), eq(DATASET_ID)); - assertThrows(PluginExecutionNotAllowed.class, () -> validationUtils.validateWorkflowPlugins( - createWorkflow(ExecutablePluginType.ENRICHMENT, ExecutablePluginType.OAIPMH_HARVEST), - predecessorType)); - } - - private Workflow createWorkflow(ExecutablePluginType... pluginTypes) { - final Workflow workflow = new Workflow(); - workflow.setDatasetId(DATASET_ID); - workflow.setMetisPluginsMetadata(Arrays.stream(pluginTypes).map(type -> { - final AbstractExecutablePluginMetadata plugin = mock( - AbstractExecutablePluginMetadata.class); - when(plugin.isEnabled()).thenReturn(true); - doReturn(type).when(plugin).getExecutablePluginType(); - return plugin; - }).collect(Collectors.toList())); - return workflow; - } - - @Test - void testValidateWorkflowPlugins_testHarvestingParameters() throws GenericMetisException { - - // Prepare correct url variables - final String simpleUrl = "http://test.com/path"; - final String urlWithFragmentAndQuery = simpleUrl + "#fragment?query=1"; - final String metadataFormat = "metadataFormatParameter"; - final String setSpec = "setSpecParameter"; - - // Create oai harvesting with all parameters - final OaipmhHarvestPluginMetadata oai = new OaipmhHarvestPluginMetadata(); - oai.setEnabled(true); - oai.setUrl(" " + 
urlWithFragmentAndQuery + " "); - oai.setMetadataFormat(" " + metadataFormat + " "); - oai.setSetSpec(" " + setSpec + " "); - - // Create http harvesting - final HTTPHarvestPluginMetadata http = new HTTPHarvestPluginMetadata(); - http.setEnabled(true); - http.setUrl(" " + urlWithFragmentAndQuery + " "); - - // Create the workflow and execute the method - final Workflow workflow = new Workflow(); - workflow.setDatasetId(DATASET_ID); - workflow.setMetisPluginsMetadata(Arrays.asList(oai, http)); - validationUtils.validateWorkflowPlugins(workflow, null); - - // Test output - assertEquals(simpleUrl, oai.getUrl()); - assertEquals(metadataFormat, oai.getMetadataFormat()); - assertEquals(setSpec, oai.getSetSpec()); - assertEquals(urlWithFragmentAndQuery, http.getUrl()); - - // Create oai harvesting with only url - oai.setUrl(urlWithFragmentAndQuery); - oai.setMetadataFormat(null); - oai.setSetSpec(null); - - // Create the workflow and execute the method - workflow.setMetisPluginsMetadata(Collections.singletonList(oai)); - validationUtils.validateWorkflowPlugins(workflow, null); - - // Test output - assertEquals(simpleUrl, oai.getUrl()); - assertNull(oai.getMetadataFormat()); - assertNull(oai.getSetSpec()); - - // Test OAI with invalid URL - oai.setUrl("invalid URL"); - workflow.setMetisPluginsMetadata(Collections.singletonList(oai)); - assertThrows(BadContentException.class, - () -> validationUtils.validateWorkflowPlugins(workflow, null)); - - // Test HTTP with missing URL - http.setUrl(null); - workflow.setMetisPluginsMetadata(Collections.singletonList(http)); - assertThrows(BadContentException.class, - () -> validationUtils.validateWorkflowPlugins(workflow, null)); - - // Test incremental OAI - oai.setUrl(urlWithFragmentAndQuery); - oai.setIncrementalHarvest(true); - workflow.setMetisPluginsMetadata(Collections.singletonList(oai)); - doReturn(true).when(validationUtils).isIncrementalHarvestingAllowed(DATASET_ID); - validationUtils.validateWorkflowPlugins(workflow, null); 
- doReturn(false).when(validationUtils).isIncrementalHarvestingAllowed(DATASET_ID); - assertThrows(BadContentException.class, - () -> validationUtils.validateWorkflowPlugins(workflow, null)); - - // Test incremental HTTP - http.setUrl(urlWithFragmentAndQuery); - http.setIncrementalHarvest(true); - workflow.setMetisPluginsMetadata(Collections.singletonList(http)); - doReturn(true).when(validationUtils).isIncrementalHarvestingAllowed(DATASET_ID); - validationUtils.validateWorkflowPlugins(workflow, null); - doReturn(false).when(validationUtils).isIncrementalHarvestingAllowed(DATASET_ID); - assertThrows(BadContentException.class, - () -> validationUtils.validateWorkflowPlugins(workflow, null)); - } - - @Test - void testIsIncrementalHarvestingAllowed() { - doReturn(List.of(new PluginWithExecutionId<>((String) null, null))) - .when(dataEvolutionUtils).getPublishedHarvestIncrements(DATASET_ID); - assertTrue(validationUtils.isIncrementalHarvestingAllowed(DATASET_ID)); - doReturn(Collections.emptyList()).when(dataEvolutionUtils) - .getPublishedHarvestIncrements(DATASET_ID); - assertFalse(validationUtils.isIncrementalHarvestingAllowed(DATASET_ID)); - doReturn(null).when(dataEvolutionUtils).getPublishedHarvestIncrements(DATASET_ID); - assertFalse(validationUtils.isIncrementalHarvestingAllowed(DATASET_ID)); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestQueueConsumer.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestQueueConsumer.java deleted file mode 100644 index 472716747e..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestQueueConsumer.java +++ /dev/null @@ -1,290 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static 
org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import com.rabbitmq.client.AMQP.BasicProperties; -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.Envelope; -import com.rabbitmq.client.MessageProperties; -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.dps.TaskState; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin.MonitorResult; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.time.Instant; -import java.util.ArrayList; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.awaitility.Awaitility; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import 
org.mockito.Mockito; -import org.redisson.api.RedissonClient; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-17 - */ -class TestQueueConsumer { - - private static SemaphoresPerPluginManager semaphoresPerPluginManager; - private static WorkflowExecutionDao workflowExecutionDao; - private static WorkflowPostProcessor workflowPostProcessor; - private static RedissonClient redissonClient; - private static Channel rabbitmqConsumerChannel; - private static Channel rabbitmqPublisherChannel; - private static WorkflowExecutionMonitor workflowExecutionMonitor; - private static WorkflowExecutorManager workflowExecutorManager; - - @BeforeAll - static void prepare() { - semaphoresPerPluginManager = new SemaphoresPerPluginManager(2); - workflowExecutionDao = Mockito.mock(WorkflowExecutionDao.class); - workflowPostProcessor = Mockito.mock(WorkflowPostProcessor.class); - workflowExecutionMonitor = Mockito.mock(WorkflowExecutionMonitor.class); - redissonClient = Mockito.mock(RedissonClient.class); - rabbitmqPublisherChannel = Mockito.mock(Channel.class); - rabbitmqConsumerChannel = Mockito.mock(Channel.class); - DpsClient dpsClient = Mockito.mock(DpsClient.class); - workflowExecutorManager = new WorkflowExecutorManager(semaphoresPerPluginManager, - workflowExecutionDao, workflowPostProcessor, rabbitmqPublisherChannel, - rabbitmqConsumerChannel, redissonClient, dpsClient); - workflowExecutorManager.setRabbitmqQueueName("ExampleQueueName"); - workflowExecutorManager.setDpsMonitorCheckIntervalInSecs(1); - workflowExecutorManager.setEcloudBaseUrl("http://universe.space"); - workflowExecutorManager.setEcloudProvider("providerExample"); - } - - @AfterEach - void cleanUp() { - Mockito.reset(workflowExecutionDao); - Mockito.reset(workflowPostProcessor); - Mockito.reset(workflowExecutionMonitor); - Mockito.reset(redissonClient); - Mockito.reset(rabbitmqPublisherChannel); - Mockito.reset(rabbitmqConsumerChannel); - } - - @Test - void initiateConsumer() 
throws Exception { - final String rabbitmqQueueName = "testname"; - new QueueConsumer(rabbitmqConsumerChannel, rabbitmqQueueName, workflowExecutorManager, - workflowExecutorManager, workflowExecutionMonitor); - ArgumentCaptor basicQos = ArgumentCaptor.forClass(Integer.class); - verify(rabbitmqConsumerChannel, times(1)).basicQos(basicQos.capture()); - assertEquals(Integer.valueOf(1), basicQos.getValue()); - ArgumentCaptor autoAcknowledge = ArgumentCaptor.forClass(Boolean.class); - verify(rabbitmqConsumerChannel, times(1)) - .basicConsume(eq(rabbitmqQueueName), autoAcknowledge.capture(), any(QueueConsumer.class)); - assertFalse(autoAcknowledge.getValue()); - } - - @Test - void initiateConsumerThrowsIOException() throws Exception { - final String rabbitmqQueueName = "testname"; - when(rabbitmqConsumerChannel - .basicConsume(eq(rabbitmqQueueName), anyBoolean(), any(QueueConsumer.class))) - .thenThrow(new IOException("Some Error")); - assertThrows(IOException.class, - () -> new QueueConsumer(rabbitmqConsumerChannel, rabbitmqQueueName, workflowExecutorManager, - workflowExecutorManager, workflowExecutionMonitor)); - ArgumentCaptor basicQos = ArgumentCaptor.forClass(Integer.class); - verify(rabbitmqConsumerChannel, times(1)).basicQos(basicQos.capture()); - verify(rabbitmqConsumerChannel, times(1)).basicConsume(eq(rabbitmqQueueName), eq(false), any()); - assertEquals(Integer.valueOf(1), basicQos.getValue()); - verifyNoMoreInteractions(rabbitmqConsumerChannel); - } - - @Test - void handleDelivery() throws IOException { - String objectId = new ObjectId().toString(); - int priority = 0; - Envelope envelope = new Envelope(1, false, "", ""); - BasicProperties basicProperties = MessageProperties.PERSISTENT_TEXT_PLAIN.builder() - .priority(priority).build(); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - - when(workflowExecutionMonitor.claimExecution(objectId)) - .thenReturn(new ImmutablePair<>(workflowExecution, true)); - 
doNothing().when(rabbitmqConsumerChannel).basicAck(envelope.getDeliveryTag(), false); - - QueueConsumer queueConsumer = new QueueConsumer(rabbitmqConsumerChannel, null, - workflowExecutorManager, workflowExecutorManager, workflowExecutionMonitor); - assertDoesNotThrow( - () -> queueConsumer.handleDelivery("1", envelope, basicProperties, objectId.getBytes(StandardCharsets.UTF_8))); - } - - @Test - void handleDeliveryExecutionThatMayNotBeClaimed() throws IOException { - - String objectId = new ObjectId().toString(); - int priority = 0; - Envelope envelope = new Envelope(1, false, "", ""); - BasicProperties basicProperties = MessageProperties.PERSISTENT_TEXT_PLAIN.builder() - .priority(priority).build(); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - - when(workflowExecutionMonitor.claimExecution(objectId)) - .thenReturn(new ImmutablePair<>(workflowExecution, false)); - doNothing().when(rabbitmqConsumerChannel).basicAck(envelope.getDeliveryTag(), false); - - QueueConsumer queueConsumer = new QueueConsumer(rabbitmqConsumerChannel, null, - workflowExecutorManager, workflowExecutorManager, workflowExecutionMonitor); - queueConsumer - .handleDelivery("1", envelope, basicProperties, objectId.getBytes(StandardCharsets.UTF_8)); - - verify(workflowExecutionMonitor, times(1)).claimExecution(any()); - verifyNoMoreInteractions(workflowExecutionMonitor); - } - - @Test - void handleDeliveryStateCancelling() throws Exception { - String objectId = new ObjectId().toString(); - int priority = 0; - Envelope envelope = new Envelope(1, false, "", ""); - BasicProperties basicProperties = MessageProperties.PERSISTENT_TEXT_PLAIN.builder() - .priority(priority).build(); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setCancelling(true); - - when(workflowExecutionMonitor.claimExecution(objectId)) - .thenReturn(new ImmutablePair<>(workflowExecution, true)); - 
doNothing().when(rabbitmqConsumerChannel).basicAck(envelope.getDeliveryTag(), false); - - QueueConsumer queueConsumer = new QueueConsumer(rabbitmqConsumerChannel, null, - workflowExecutorManager, workflowExecutorManager, workflowExecutionMonitor); - queueConsumer - .handleDelivery("1", envelope, basicProperties, objectId.getBytes(StandardCharsets.UTF_8)); - - verify(workflowExecutionDao, times(1)).update(workflowExecution); - } - - @Test - void handleDeliveryInterruptWhilePolling() throws Exception { - - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - ExecutionProgress processedExecutionProgress = new ExecutionProgress(); - processedExecutionProgress.setStatus(TaskState.PROCESSED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin1 = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata1 = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin1.setPluginMetadata(oaipmhHarvestPluginMetadata1); - ArrayList abstractMetisPlugins1 = new ArrayList<>(); - abstractMetisPlugins1.add(oaipmhHarvestPlugin1); - - OaipmhHarvestPlugin oaipmhHarvestPlugin2 = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata2 = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin2.setPluginMetadata(oaipmhHarvestPluginMetadata2); - ArrayList abstractMetisPlugins2 = new ArrayList<>(); - abstractMetisPlugins2.add(oaipmhHarvestPlugin2); - - OaipmhHarvestPlugin oaipmhHarvestPlugin3 = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata3 = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin3.setPluginMetadata(oaipmhHarvestPluginMetadata3); - ArrayList abstractMetisPlugins3 = new ArrayList<>(); - abstractMetisPlugins3.add(oaipmhHarvestPlugin3); - - int priority = 0; - BasicProperties basicProperties = MessageProperties.PERSISTENT_TEXT_PLAIN.builder() 
- .priority(priority).build(); - Envelope envelope = new Envelope(1, false, "", ""); - ObjectId objectId1 = new ObjectId(Date.from(Instant.now().minusSeconds(1))); - ObjectId objectId2 = new ObjectId(Date.from(Instant.now())); - ObjectId objectId3 = new ObjectId(Date.from(Instant.now().plusSeconds(1))); - byte[] objectIdBytes1 = objectId1.toString().getBytes(StandardCharsets.UTF_8); - byte[] objectIdBytes2 = objectId2.toString().getBytes(StandardCharsets.UTF_8); - byte[] objectIdBytes3 = objectId3.toString().getBytes(StandardCharsets.UTF_8); - WorkflowExecution workflowExecution1 = TestObjectFactory.createWorkflowExecutionObject(); - WorkflowExecution workflowExecution2 = TestObjectFactory.createWorkflowExecutionObject(); - WorkflowExecution workflowExecution3 = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution1.setId(objectId1); - workflowExecution1.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecution1.setMetisPlugins(abstractMetisPlugins1); - workflowExecution1.setStartedDate(new Date(1000)); - workflowExecution2.setId(objectId2); - workflowExecution2.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecution2.setMetisPlugins(abstractMetisPlugins2); - workflowExecution2.setStartedDate(new Date(2000)); - workflowExecution3.setId(objectId3); - workflowExecution3.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecution3.setMetisPlugins(abstractMetisPlugins3); - workflowExecution3.setStartedDate(new Date(3000)); - when(workflowExecutionMonitor.claimExecution(objectId1.toString())) - .thenReturn(new ImmutablePair<>(workflowExecution1, true)); - when(workflowExecutionMonitor.claimExecution(objectId2.toString())) - .thenReturn(new ImmutablePair<>(workflowExecution2, true)); - when(workflowExecutionMonitor.claimExecution(objectId3.toString())) - .thenReturn(new ImmutablePair<>(workflowExecution3, true)); - doNothing().when(rabbitmqConsumerChannel).basicAck(envelope.getDeliveryTag(), false); - - //For running properly the 
WorkflowExecution. - when(workflowExecutionMonitor.claimExecution(workflowExecution1.getId().toString())) - .thenReturn(new ImmutablePair<>(workflowExecution1, true)) - .thenReturn(new ImmutablePair<>(workflowExecution1, false)); - when(workflowExecutionMonitor.claimExecution(workflowExecution2.getId().toString())) - .thenReturn(new ImmutablePair<>(workflowExecution2, true)) - .thenReturn(new ImmutablePair<>(workflowExecution2, false)); - when(workflowExecutionMonitor.claimExecution(workflowExecution3.getId().toString())) - .thenReturn(new ImmutablePair<>(workflowExecution3, true)) - .thenReturn(new ImmutablePair<>(workflowExecution3, false)); - doNothing().when(workflowExecutionDao).updateMonitorInformation(any(WorkflowExecution.class)); - when(workflowExecutionDao.isCancelling(any(ObjectId.class))).thenReturn(false); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(processedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin1).monitor(any(DpsClient.class)); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(processedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin2).monitor(any(DpsClient.class)); - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(any(WorkflowExecution.class)); - when(workflowExecutionDao.update(any(WorkflowExecution.class))).thenReturn(anyString()); - - QueueConsumer queueConsumer = spy( - new QueueConsumer(rabbitmqConsumerChannel, null, workflowExecutorManager, - workflowExecutorManager, workflowExecutionMonitor)); - doThrow(InterruptedException.class).doCallRealMethod().when(queueConsumer) - .checkAndCleanCompletionService(); - - queueConsumer.handleDelivery("1", envelope, basicProperties, objectIdBytes1); - queueConsumer.handleDelivery("2", envelope, basicProperties, objectIdBytes2); - Awaitility.await().forever().until(() -> oaipmhHarvestPlugin1.getStartedDate() != 
null); - Awaitility.await().forever().until(() -> oaipmhHarvestPlugin2.getStartedDate() != null); - queueConsumer.handleDelivery("3", envelope, basicProperties, objectIdBytes3); - - assertThrows(InterruptedException.class, queueConsumer::checkAndCleanCompletionService); - queueConsumer.checkAndCleanCompletionService(); - - Awaitility.await().forever() - .until(() -> workflowExecution1.getWorkflowStatus() == WorkflowStatus.FINISHED); - Awaitility.await().forever() - .until(() -> workflowExecution2.getWorkflowStatus() == WorkflowStatus.FINISHED); - assertTrue(0 <= queueConsumer.getThreadsCounter() && queueConsumer.getThreadsCounter() <= 3); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSchedulerExecutor.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSchedulerExecutor.java deleted file mode 100644 index bf0b1d992c..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSchedulerExecutor.java +++ /dev/null @@ -1,189 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.atMost; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.service.OrchestratorService; -import eu.europeana.metis.core.service.ScheduleWorkflowService; -import eu.europeana.metis.core.utils.TestObjectFactory; -import 
eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.mockito.Mockito; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.redisson.client.RedisConnectionException; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-17 - */ -class TestSchedulerExecutor { - - private static int periodicSchedulerCheckInSecs = 1; - private static OrchestratorService orchestratorService; - private static ScheduleWorkflowService scheduleWorkflowService; - private static RedissonClient redissonClient; - private static final String SCHEDULER_LOCK = "schedulerLock"; - - @BeforeAll - static void prepare() { - orchestratorService = Mockito.mock(OrchestratorService.class); - scheduleWorkflowService = Mockito.mock(ScheduleWorkflowService.class); - redissonClient = Mockito.mock(RedissonClient.class); - } - - @AfterEach - void cleanUp() { - Mockito.reset(orchestratorService); - Mockito.reset(scheduleWorkflowService); - Mockito.reset(redissonClient); - } - - @Test - void run() throws Exception { - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(SCHEDULER_LOCK)).thenReturn(rlock); - doNothing().when(rlock).lock(); - SchedulerExecutor schedulerExecutor = new SchedulerExecutor(orchestratorService, - scheduleWorkflowService, - redissonClient); - - int userWorkflowExecutionsPerRequest = 3; - int listSize = userWorkflowExecutionsPerRequest - 1; //To not trigger paging - Date now = new Date(); - now.setTime(now.getTime() + periodicSchedulerCheckInSecs * 1000); - List listOfScheduledWorkflowsWithDateONCE = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, now, ScheduleFrequence.ONCE); - List 
listOfScheduledWorkflowsWithDateDAILY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, now, ScheduleFrequence.DAILY); - List listOfScheduledWorkflowsWithDateWEEKLY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, now, - ScheduleFrequence.WEEKLY); - List listOfScheduledWorkflowsWithDateMONTHLY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, now, - ScheduleFrequence.MONTHLY); - - when(scheduleWorkflowService.getScheduledWorkflowsPerRequest()) - .thenReturn(userWorkflowExecutionsPerRequest); - - when(scheduleWorkflowService.getAllScheduledWorkflowsByDateRangeONCE(any(LocalDateTime.class), - any(LocalDateTime.class), anyInt())) - .thenReturn(listOfScheduledWorkflowsWithDateONCE); - when( - scheduleWorkflowService.getAllScheduledWorkflowsWithoutAuthorization(any(ScheduleFrequence.class), anyInt())) - .thenReturn(listOfScheduledWorkflowsWithDateDAILY).thenReturn( - listOfScheduledWorkflowsWithDateWEEKLY).thenReturn( - listOfScheduledWorkflowsWithDateMONTHLY); - when(orchestratorService.addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(anyString(), isNull(), isNull(), anyInt())) - .thenThrow(new NoDatasetFoundException("Some Error")) - .thenReturn(null); //Throw an exception as well, should continue execution after that - doNothing().when(rlock).unlock(); - - schedulerExecutor.performScheduling(); - - verify(scheduleWorkflowService, times(4)).getScheduledWorkflowsPerRequest(); - verify(scheduleWorkflowService, times(1)) - .getAllScheduledWorkflowsByDateRangeONCE(any(LocalDateTime.class), - any(LocalDateTime.class), anyInt()); - verify(scheduleWorkflowService, times(3)) - .getAllScheduledWorkflowsWithoutAuthorization(any(ScheduleFrequence.class), anyInt()); - verify(orchestratorService, atMost(listSize * 4)) - .addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(anyString(), isNull(), isNull(), anyInt()); - } - - @Test - void runAllSchedulesOutOfRange() 
throws Exception { - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(SCHEDULER_LOCK)).thenReturn(rlock); - doNothing().when(rlock).lock(); - SchedulerExecutor schedulerExecutor = new SchedulerExecutor(orchestratorService, - scheduleWorkflowService, - redissonClient); - - int userWorkflowExecutionsPerRequest = 3; - int listSize = userWorkflowExecutionsPerRequest - 1; //To not trigger paging - Date past = new Date(); - past.setTime(past.getTime() - periodicSchedulerCheckInSecs * 1000); - List listOfScheduledWorkflowsWithDateDAILY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, past, - ScheduleFrequence.DAILY); - List listOfScheduledWorkflowsWithDateWEEKLY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, past, - ScheduleFrequence.WEEKLY); - List listOfScheduledWorkflowsWithDateMONTHLY = TestObjectFactory - .createListOfScheduledWorkflowsWithDateAndFrequence(listSize, past, - ScheduleFrequence.MONTHLY); - - when(scheduleWorkflowService.getScheduledWorkflowsPerRequest()) - .thenReturn(userWorkflowExecutionsPerRequest); - - when(scheduleWorkflowService.getAllScheduledWorkflowsByDateRangeONCE(any(LocalDateTime.class), - any(LocalDateTime.class), anyInt())) - .thenReturn(new ArrayList<>()); - when( - scheduleWorkflowService.getAllScheduledWorkflowsWithoutAuthorization(any(ScheduleFrequence.class), anyInt())) - .thenReturn(listOfScheduledWorkflowsWithDateDAILY).thenReturn( - listOfScheduledWorkflowsWithDateWEEKLY).thenReturn( - listOfScheduledWorkflowsWithDateMONTHLY); - when(orchestratorService - .addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(anyString(), isNull(), isNull(), anyInt())) - .thenThrow(new NoDatasetFoundException("Some Error")) - .thenReturn(null); //Throw an exception as well, should continue execution after that - doNothing().when(rlock).unlock(); - - schedulerExecutor.performScheduling(); - - verify(scheduleWorkflowService, 
times(4)).getScheduledWorkflowsPerRequest(); - verify(scheduleWorkflowService, times(1)) - .getAllScheduledWorkflowsByDateRangeONCE(any(LocalDateTime.class), - any(LocalDateTime.class), anyInt()); - verify(scheduleWorkflowService, times(3)) - .getAllScheduledWorkflowsWithoutAuthorization(any(ScheduleFrequence.class), anyInt()); - verify(orchestratorService, times(0)) - .addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(anyString(), isNull(), isNull(), anyInt()); - } - - @Test - void runThatThrowsExceptionDuringLockAndContinues() { - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(SCHEDULER_LOCK)).thenReturn(rlock); - SchedulerExecutor schedulerExecutor = new SchedulerExecutor(orchestratorService, - scheduleWorkflowService, - redissonClient); - doNothing().when(rlock).lock(); - doThrow(new RedisConnectionException("Connection error")).when(rlock).lock(); - schedulerExecutor.performScheduling(); - verifyNoMoreInteractions(orchestratorService); - } - - @Test - void runThatThrowsExceptionDuringLockAndUnlockAndContinues() { - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(SCHEDULER_LOCK)).thenReturn(rlock); - SchedulerExecutor schedulerExecutor = new SchedulerExecutor(orchestratorService, - scheduleWorkflowService, redissonClient); - doThrow(new RedisConnectionException("Connection error")).when(rlock).lock(); - doThrow(new RedisConnectionException("Connection error")).when(rlock).unlock(); - schedulerExecutor.performScheduling(); - verify(rlock, times(1)).unlock(); - verifyNoMoreInteractions(orchestratorService); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSemaphoresPerPluginManager.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSemaphoresPerPluginManager.java deleted file mode 100644 index aec1ee8aa8..0000000000 --- 
a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestSemaphoresPerPluginManager.java +++ /dev/null @@ -1,52 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import org.junit.jupiter.api.Test; - -class TestSemaphoresPerPluginManager { - - @Test - void initiateAndCheckAllSemaphores() { - final SemaphoresPerPluginManager semaphoresPerPluginManager = new SemaphoresPerPluginManager(2); - - int successAcquiresCounter = 0; - for (ExecutablePluginType executablePluginType : ExecutablePluginType.values()) { - if (semaphoresPerPluginManager.tryAcquireForExecutablePluginType(executablePluginType)) { - semaphoresPerPluginManager.releaseForPluginType(executablePluginType); - successAcquiresCounter++; - } - } - - assertEquals(ExecutablePluginType.values().length, successAcquiresCounter); - } - - @Test - void checkFullSemaphore() { - final SemaphoresPerPluginManager semaphoresPerPluginManager = new SemaphoresPerPluginManager(1); - - boolean acquired = semaphoresPerPluginManager - .tryAcquireForExecutablePluginType(ExecutablePluginType.ENRICHMENT); - assertTrue(acquired); - // Same plugin should not be allowed - acquired = semaphoresPerPluginManager - .tryAcquireForExecutablePluginType(ExecutablePluginType.ENRICHMENT); - assertFalse(acquired); - // Different plugin should be allowed - acquired = semaphoresPerPluginManager - .tryAcquireForExecutablePluginType(ExecutablePluginType.NORMALIZATION); - assertTrue(acquired); - - // Release previously failed and re-acquire - semaphoresPerPluginManager.releaseForPluginType(ExecutablePluginType.ENRICHMENT); - acquired = semaphoresPerPluginManager - .tryAcquireForExecutablePluginType(ExecutablePluginType.ENRICHMENT); - assertTrue(acquired); - 
semaphoresPerPluginManager.releaseForPluginType(ExecutablePluginType.ENRICHMENT); - semaphoresPerPluginManager.releaseForPluginType(ExecutablePluginType.NORMALIZATION); - } - -} \ No newline at end of file diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutionMonitor.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutionMonitor.java deleted file mode 100644 index 99844b669f..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutionMonitor.java +++ /dev/null @@ -1,364 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.execution.WorkflowExecutionMonitor.WorkflowExecutionEntry; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import 
eu.europeana.metis.core.workflow.WorkflowStatus; -import java.time.Duration; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.InOrder; -import org.mockito.Mockito; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.redisson.client.RedisConnectionException; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-16 - */ -class TestWorkflowExecutionMonitor { - - private static final String FAILSAFE_LOCK = "failsafeLock"; - - private static WorkflowExecutionDao workflowExecutionDao; - private static WorkflowExecutorManager workflowExecutorManager; - private static RedissonClient redissonClient; - private static RLock lock; - - @BeforeAll - static void prepare() { - workflowExecutionDao = Mockito.mock(WorkflowExecutionDao.class); - workflowExecutorManager = Mockito.mock(WorkflowExecutorManager.class); - redissonClient = Mockito.mock(RedissonClient.class); - lock = mock(RLock.class); - } - - @BeforeEach - void setUp() { - when(redissonClient.getFairLock(FAILSAFE_LOCK)).thenReturn(lock); - doNothing().when(lock).lock(); - doNothing().when(lock).unlock(); - } - - @AfterEach - void cleanUp() { - Mockito.reset(workflowExecutorManager); - Mockito.reset(redissonClient); - Mockito.reset(workflowExecutionDao); - Mockito.reset(lock); - } - - @Test - void testFailSafe() { - - // Create workflow executions - final long updatedTime = 0; - final Instant id1 = Instant.now(); - final Instant id2 = id1.minusSeconds(1); - final Instant id3 = id2.minusSeconds(1); - final Instant id4 = id3.minusSeconds(1); - final WorkflowExecution workflowExecution1 = createWorkflowExecution(id1, - new Date(updatedTime)); - final WorkflowExecution 
workflowExecution2 = createWorkflowExecution(id2, - new Date(updatedTime)); - final WorkflowExecution workflowExecution3 = createWorkflowExecution(id3, - new Date(updatedTime)); - final WorkflowExecution workflowExecution4 = createWorkflowExecution(id4, - new Date(updatedTime)); - - // Create monitor - final Duration leniency = Duration.ofSeconds(10); - final WorkflowExecutionMonitor monitor = Mockito.spy( - new WorkflowExecutionMonitor(workflowExecutorManager, workflowExecutionDao, redissonClient, - leniency)); - - // Mock the get entry: 1 is normal, 2 is hanging and 3 and 4 are null. - WorkflowExecutionEntry entry1 = mock(WorkflowExecutionEntry.class); - doReturn(false).when(entry1).assumeHanging(leniency); - WorkflowExecutionEntry entry2 = mock(WorkflowExecutionEntry.class); - doReturn(true).when(entry2).assumeHanging(leniency); - doReturn(entry1).when(monitor).getEntry(workflowExecution1); - doReturn(entry2).when(monitor).getEntry(workflowExecution2); - doReturn(null).when(monitor).getEntry(workflowExecution3); - doReturn(null).when(monitor).getEntry(workflowExecution4); - - // Mock the retrieval of executions: 1, 2 and 3 are running, 4 is in queue. 
- doReturn(Arrays.asList(workflowExecution1, workflowExecution2, workflowExecution3)) - .when(monitor).updateCurrentRunningExecutions(); - when(workflowExecutionDao - .getAllWorkflowExecutions(isNull(), eq(EnumSet.of(WorkflowStatus.INQUEUE)), any(), - anyBoolean(), eq(0), eq(1), eq(true))) - .thenReturn(new ResultList<>(Collections.singletonList(workflowExecution4), false)); - when(workflowExecutionDao.getWorkflowExecutionsPerRequest()).thenReturn(4); - - // Perform method and verify the requeued executions - monitor.performFailsafe(); - verify(workflowExecutorManager, times(1)) - .addWorkflowExecutionToQueue(eq(workflowExecution2.getId().toString()), - eq(workflowExecution2.getWorkflowPriority())); - verify(workflowExecutorManager, times(1)) - .addWorkflowExecutionToQueue(eq(workflowExecution3.getId().toString()), - eq(workflowExecution3.getWorkflowPriority())); - verify(workflowExecutorManager, times(1)) - .addWorkflowExecutionToQueue(eq(workflowExecution4.getId().toString()), - eq(workflowExecution4.getWorkflowPriority())); - verifyNoMoreInteractions(workflowExecutorManager); - - // Verify calls that need to be locked. 
- InOrder inOrder = Mockito.inOrder(lock, workflowExecutorManager, workflowExecutionDao, monitor); - inOrder.verify(monitor).performFailsafe(); - inOrder.verify(lock).lock(); - inOrder.verify(monitor, times(1)).updateCurrentRunningExecutions(); - inOrder.verify(workflowExecutionDao, times(1)) - .getAllWorkflowExecutions(isNull(), eq(EnumSet.of(WorkflowStatus.INQUEUE)), any(), - anyBoolean(), eq(0), eq(1), eq(true)); - inOrder.verify(lock).unlock(); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void testFailSafeThatThrowsExceptionDuringLockAndContinues() { - WorkflowExecutionMonitor monitor = new WorkflowExecutionMonitor(workflowExecutorManager, - workflowExecutionDao, redissonClient, Duration.ofHours(1)); - doThrow(new RedisConnectionException("Connection error")).when(lock).lock(); - monitor.performFailsafe(); - verifyNoMoreInteractions(workflowExecutorManager); - } - - @Test - void testFailSafeThatThrowsExceptionDuringLockAndUnlockAndContinues() { - WorkflowExecutionMonitor monitor = new WorkflowExecutionMonitor(workflowExecutorManager, - workflowExecutionDao, redissonClient, Duration.ofHours(1)); - doThrow(new RedisConnectionException("Connection error")).when(lock).lock(); - doThrow(new RedisConnectionException("Connection error")).when(lock).unlock(); - monitor.performFailsafe(); - verify(lock, times(1)).unlock(); - verifyNoMoreInteractions(workflowExecutorManager); - } - - @Test - void testClaimExecution() { - - // Create monitor. 
- WorkflowExecutionMonitor monitor = Mockito.spy( - new WorkflowExecutionMonitor(workflowExecutorManager, workflowExecutionDao, redissonClient, - Duration.ofHours(1))); - - // Create workflow execution - final WorkflowExecution workflowExecution = createWorkflowExecution(Instant.now(), null); - final String id = workflowExecution.getId().toString(); - doReturn(workflowExecution).when(workflowExecutionDao).getById(id); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - - // Test when claim succeeds on execution in queue. - // Note: Don't use Instant.now(): date must be created in the same way to have same precision. - doReturn(true).when(monitor).mayClaimExecution(workflowExecution); - assertSame(workflowExecution, monitor.claimExecution(id).getLeft()); - - InOrder inOrder = Mockito - .inOrder(lock, redissonClient, workflowExecutorManager, workflowExecutionDao, monitor); - inOrder.verify(monitor, times(1)).claimExecution(any()); - inOrder.verify(lock, times(1)).lock(); - inOrder.verify(workflowExecutionDao, times(1)).getById(id); - inOrder.verify(monitor, times(1)).mayClaimExecution(workflowExecution); - inOrder.verify(lock, times(1)).unlock(); - inOrder.verifyNoMoreInteractions(); - - assertSame(workflowExecution, monitor.claimExecution(id).getLeft()); - // Test when claim fails. - doReturn(false).when(monitor).mayClaimExecution(workflowExecution); - assertFalse(monitor.claimExecution(id).getRight()); - } - - @Test - void testClaimExecutionWithException() { - final String id = "id"; - WorkflowExecutionMonitor monitor = Mockito.spy( - new WorkflowExecutionMonitor(workflowExecutorManager, workflowExecutionDao, redissonClient, - Duration.ofHours(1))); - doThrow(new RuntimeException("Test exception")).when(workflowExecutionDao).getById(id); - assertNull(monitor.claimExecution(id).getLeft()); - verify(lock, times(1)).unlock(); - } - - @Test - void testEntry() { - - // Create new entry. 
- final long updateTime = 0; - final Instant begin = Instant.now(); - final WorkflowExecutionEntry entry = spy(new WorkflowExecutionEntry(new Date(updateTime))); - final Instant end = Instant.now(); - - // Test value of last value change. - assertFalse(entry.getLastValueChange().isAfter(end)); - assertFalse(entry.getLastValueChange().isBefore(begin)); - - // Test comparison of update time - assertTrue(entry.updateTimeValueIsEqual(new Date(updateTime))); - assertFalse(entry.updateTimeValueIsEqual(new Date(updateTime + 1))); - assertFalse(entry.updateTimeValueIsEqual(null)); - - // Test decision on whether it is hanging - final Duration leniency = Duration.ofSeconds(1); - final Instant now = Instant.now(); - doReturn(now).when(entry).getLastValueChange(); - - doReturn(now).when(entry).getNow(); - assertFalse(entry.assumeHanging(leniency)); - - doReturn(now.minus(leniency)).when(entry).getNow(); - assertFalse(entry.assumeHanging(leniency)); - - doReturn(now.plus(leniency)).when(entry).getNow(); - assertFalse(entry.assumeHanging(leniency)); - - doReturn(now.plus(leniency).plus(leniency)).when(entry).getNow(); - assertTrue(entry.assumeHanging(leniency)); - - // Test with a null update time. 
- final WorkflowExecutionEntry entryWithoutUpdateTime = new WorkflowExecutionEntry(null); - assertFalse(entryWithoutUpdateTime.updateTimeValueIsEqual(new Date())); - assertTrue(entryWithoutUpdateTime.updateTimeValueIsEqual(null)); - } - - @Test - void testMayClaimExecution() { - - // Create monitor - final Duration leniency = Duration.ofSeconds(10); - final WorkflowExecutionMonitor monitor = spy( - new WorkflowExecutionMonitor(workflowExecutorManager, workflowExecutionDao, redissonClient, - leniency)); - - // Create workflow execution - final WorkflowExecution workflowExecution = createWorkflowExecution(Instant.now(), null); - - // Checks of non-RUNNING executions - workflowExecution.setWorkflowStatus(WorkflowStatus.CANCELLED); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - workflowExecution.setWorkflowStatus(WorkflowStatus.FAILED); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - assertTrue(monitor.mayClaimExecution(workflowExecution)); - - // Now check for running executions. - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - - // Check when entry is not there. 
- doReturn(null).when(monitor).getEntry(workflowExecution); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - - // Check when entry has changed - final Date updateTime = new Date(0); - workflowExecution.setUpdatedDate(updateTime); - final WorkflowExecutionEntry entry = spy(new WorkflowExecutionEntry(null)); - doReturn(entry).when(monitor).getEntry(workflowExecution); - doReturn(false).when(entry).updateTimeValueIsEqual(updateTime); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - - // Check when entry is hanging - doReturn(true).when(entry).updateTimeValueIsEqual(updateTime); - doReturn(false).when(entry).assumeHanging(leniency); - assertFalse(monitor.mayClaimExecution(workflowExecution)); - - // Check when all is well - doReturn(true).when(entry).assumeHanging(leniency); - assertTrue(monitor.mayClaimExecution(workflowExecution)); - } - - @Test - void testUpdateCurrentExecutions() { - - // Create workflow executions - final long updatedTime = 0; - final Instant id1 = Instant.now(); - final Instant id2 = id1.minusSeconds(1); - final Instant id3 = id2.minusSeconds(1); - final Instant id4 = id3.minusSeconds(1); - final WorkflowExecution workflowExecution1 = createWorkflowExecution(id1, - new Date(updatedTime)); - final WorkflowExecution workflowExecution2 = createWorkflowExecution(id2, - new Date(updatedTime)); - final WorkflowExecution workflowExecution3 = createWorkflowExecution(id3, - new Date(updatedTime)); - final WorkflowExecution workflowExecution4 = createWorkflowExecution(id4, - new Date(updatedTime)); - - // Create monitor - final WorkflowExecutionMonitor monitor = spy( - new WorkflowExecutionMonitor(workflowExecutorManager, workflowExecutionDao, redissonClient, - Duration.ofSeconds(10))); - - // Set base for current executions: include workflows 1, 2 and 3. Verify. 
- doReturn(Arrays.asList(workflowExecution1, workflowExecution2, workflowExecution3)) - .when(monitor).getWorkflowExecutionsWithStatus(WorkflowStatus.RUNNING); - monitor.updateCurrentRunningExecutions(); - - // Check that all data was processed correctly. - final WorkflowExecutionEntry executionRecord1 = monitor.getEntry(workflowExecution1); - assertNotNull(executionRecord1); - assertTrue(executionRecord1.updateTimeValueIsEqual(new Date(updatedTime))); - assertNotNull(monitor.getEntry(workflowExecution2)); - assertTrue(monitor.getEntry(workflowExecution2).updateTimeValueIsEqual(new Date(updatedTime))); - assertNotNull(monitor.getEntry(workflowExecution3)); - assertTrue(monitor.getEntry(workflowExecution3).updateTimeValueIsEqual(new Date(updatedTime))); - assertNull(monitor.getEntry(workflowExecution4)); - - // Change the current executions: include workflows 1 (updated), 2 (unchanged) and 4. - final long newUpdatedTime = updatedTime + 1; - workflowExecution1.setUpdatedDate(new Date(newUpdatedTime)); - doReturn(Arrays.asList(workflowExecution1, workflowExecution2, workflowExecution4)) - .when(monitor).getWorkflowExecutionsWithStatus(WorkflowStatus.RUNNING); - monitor.updateCurrentRunningExecutions(); - - // Check that all data was processed correctly. 
- assertNotNull(monitor.getEntry(workflowExecution1)); - assertTrue( - monitor.getEntry(workflowExecution1).updateTimeValueIsEqual(new Date(newUpdatedTime))); - assertNotNull(monitor.getEntry(workflowExecution2)); - assertTrue(monitor.getEntry(workflowExecution2).updateTimeValueIsEqual(new Date(updatedTime))); - assertNull(monitor.getEntry(workflowExecution3)); - assertNotNull(monitor.getEntry(workflowExecution4)); - assertTrue(monitor.getEntry(workflowExecution4).updateTimeValueIsEqual(new Date(updatedTime))); - } - - private static WorkflowExecution createWorkflowExecution(Instant id, Date updatedDate) { - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId(Date.from(id))); - workflowExecution.setUpdatedDate(updatedDate); - return workflowExecution; - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutor.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutor.java deleted file mode 100644 index 7567abbced..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutor.java +++ /dev/null @@ -1,541 +0,0 @@ -package eu.europeana.metis.core.execution; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.notNull; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.atMost; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import 
static org.mockito.Mockito.when; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.common.model.dps.TaskState; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.SystemId; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.DpsTaskSettings; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin.MonitorResult; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.UnrecoverableExternalTaskException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.InOrder; -import org.mockito.Mockito; -import org.springframework.http.HttpStatus; -import org.springframework.web.client.HttpServerErrorException; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-17 - */ -class TestWorkflowExecutor { - - private static WorkflowExecutionDao workflowExecutionDao; - private static WorkflowPostProcessor workflowPostProcessor; - private static DpsClient dpsClient; - private static WorkflowExecutionMonitor workflowExecutionMonitor; - private static PersistenceProvider persistenceProvider; - private static WorkflowExecutionSettings 
workflowExecutionSettings; - - @BeforeAll - static void prepare() { - workflowExecutionDao = Mockito.mock(WorkflowExecutionDao.class); - workflowPostProcessor = Mockito.mock(WorkflowPostProcessor.class); - dpsClient = Mockito.mock(DpsClient.class); - workflowExecutionMonitor = Mockito.mock(WorkflowExecutionMonitor.class); - persistenceProvider = new PersistenceProvider(null, null, new SemaphoresPerPluginManager(2), - workflowExecutionDao, workflowPostProcessor, null, dpsClient); - workflowExecutionSettings = Mockito.mock(WorkflowExecutionSettings.class); - when(workflowExecutionSettings.getPeriodOfNoProcessedRecordsChangeInMinutes()).thenReturn(10); - } - - @AfterEach - void cleanUp() { - Mockito.reset(workflowExecutionDao); - Mockito.reset(workflowPostProcessor); - Mockito.reset(workflowExecutionMonitor); - Mockito.reset(dpsClient); - } - - @Test - void callNonMockedFieldValue() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - ExecutionProgress processedExecutionProgress = new ExecutionProgress(); - processedExecutionProgress.setStatus(TaskState.PROCESSED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(new Date()); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - doReturn(new 
MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(processedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - doReturn(currentlyProcessingExecutionProgress).doReturn(processedExecutionProgress) - .when(oaipmhHarvestPlugin).getExecutionProgress(); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(2)).updateMonitorInformation(workflowExecution); - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - InOrder inOrderForPlugin = inOrder(oaipmhHarvestPlugin); - inOrderForPlugin.verify(oaipmhHarvestPlugin, times(2)) - .setPluginStatusAndResetFailMessage(PluginStatus.RUNNING); - inOrderForPlugin.verify(oaipmhHarvestPlugin) - .setPluginStatusAndResetFailMessage(PluginStatus.FINISHED); - verify(oaipmhHarvestPlugin, atMost(5)).setPluginStatusAndResetFailMessage(any()); - verify(oaipmhHarvestPlugin, never()).setFailMessage(anyString()); - } - - @Test - void callNonMockedFieldValue_ExceptionWhenExecuteIsCalled() throws Exception { - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - 
WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - - doThrow(new ExternalTaskException("Some error")).when(oaipmhHarvestPlugin) - .execute(any(String.class), any(DpsClient.class), any(DpsTaskSettings.class)); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - verify(oaipmhHarvestPlugin).setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - verify(oaipmhHarvestPlugin, atMost(1)).setPluginStatusAndResetFailMessage(any()); - verify(oaipmhHarvestPlugin).setFailMessage(notNull()); - verify(oaipmhHarvestPlugin, times(1)).setFailMessage(anyString()); - } - - @Test - void callNonMockedFieldValue_DROPPEDExeternalTaskButNotCancelled() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - ExecutionProgress droppedExecutionProgress = new ExecutionProgress(); - droppedExecutionProgress.setStatus(TaskState.DROPPED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata 
oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(new Date()); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(droppedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - doReturn(currentlyProcessingExecutionProgress).doReturn(droppedExecutionProgress) - .when(oaipmhHarvestPlugin).getExecutionProgress(); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(2)).updateMonitorInformation(workflowExecution); - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - InOrder inOrderForPlugin = inOrder(oaipmhHarvestPlugin); - inOrderForPlugin.verify(oaipmhHarvestPlugin, times(2)) - .setPluginStatusAndResetFailMessage(PluginStatus.RUNNING); - inOrderForPlugin.verify(oaipmhHarvestPlugin) - 
.setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - verify(oaipmhHarvestPlugin, atMost(5)).setPluginStatusAndResetFailMessage(any()); - verify(oaipmhHarvestPlugin).setFailMessage(notNull()); - verify(oaipmhHarvestPlugin, times(1)).setFailMessage(anyString()); - } - - @Test - void callNonMockedFieldValue_ConsecutiveMonitorFailures() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(new Date()); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - - Throwable[] dpsException100Times = new Throwable[100]; - Arrays.setAll(dpsException100Times, index -> new ExternalTaskException("Some error")); - doThrow(dpsException100Times) - .doReturn(new MonitorResult(TaskState.PROCESSED, null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - - doReturn(currentlyProcessingExecutionProgress).when(oaipmhHarvestPlugin).getExecutionProgress(); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - 
.thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - when(workflowExecutionSettings.getDpsMonitorCheckIntervalInSecs()).thenReturn(0); - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - verify(oaipmhHarvestPlugin).setPluginStatusAndResetFailMessage(PluginStatus.FINISHED); - verify(oaipmhHarvestPlugin, atLeastOnce()).setPluginStatusAndResetFailMessage(PluginStatus.PENDING); - verify(oaipmhHarvestPlugin, never()).setFailMessage(notNull()); - } - - @Test - void callNonMockedFieldValue_MonitorFailsOnUnrecoverableExternalTaskException() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(new Date()); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - doThrow(new UnrecoverableExternalTaskException("Check progress failed!", new Exception("Some error"))) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - doReturn(currentlyProcessingExecutionProgress).when(oaipmhHarvestPlugin).getExecutionProgress(); - - 
doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - verify(oaipmhHarvestPlugin).setPluginStatusAndResetFailMessage(PluginStatus.FAILED); - verify(oaipmhHarvestPlugin, atMost(1)).setPluginStatusAndResetFailMessage(any()); - verify(oaipmhHarvestPlugin).setFailMessage(notNull()); - verify(oaipmhHarvestPlugin, times(1)).setFailMessage(anyString()); - } - - @Test - void callNonMockedFieldValue_ReachPendingState_and_then_finish() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - ExecutionProgress processedExecutionProgress = new ExecutionProgress(); - processedExecutionProgress.setStatus(TaskState.PROCESSED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - 
workflowExecution.setStartedDate(new Date()); - - doReturn(oaipmhHarvestPluginMetadata).when(oaipmhHarvestPlugin).getPluginMetadata(); - final ExternalTaskException exception = new ExternalTaskException("Some error", - new HttpServerErrorException(HttpStatus.BAD_GATEWAY)); - final ExternalTaskException[] externalTaskExceptions = new ExternalTaskException[WorkflowExecutor.MAX_CANCEL_OR_MONITOR_FAILURES]; - Arrays.fill(externalTaskExceptions, exception); - doThrow(externalTaskExceptions) - .doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(processedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - doReturn(currentlyProcessingExecutionProgress).doReturn(processedExecutionProgress) - .when(oaipmhHarvestPlugin).getExecutionProgress(); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - InOrder inOrderForPlugin = inOrder(oaipmhHarvestPlugin); - inOrderForPlugin.verify(oaipmhHarvestPlugin, times(1)) - .setPluginStatusAndResetFailMessage(PluginStatus.PENDING); - inOrderForPlugin.verify(oaipmhHarvestPlugin, times(2)) - .setPluginStatusAndResetFailMessage(PluginStatus.RUNNING); - inOrderForPlugin.verify(oaipmhHarvestPlugin, times(1)) - .setPluginStatusAndResetFailMessage(PluginStatus.FINISHED); - verify(oaipmhHarvestPlugin, 
atMost(6)).setPluginStatusAndResetFailMessage(any()); - verify(oaipmhHarvestPlugin, never()).setFailMessage(anyString()); - } - - - @Test - void callNonMockedFieldValueCancellingState() throws Exception { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - ExecutionProgress processedExecutionProgress = new ExecutionProgress(); - processedExecutionProgress.setStatus(TaskState.PROCESSED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(new Date()); - - when(oaipmhHarvestPlugin.getPluginMetadata()).thenReturn(oaipmhHarvestPluginMetadata); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .doReturn(new MonitorResult(processedExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - doReturn(currentlyProcessingExecutionProgress).doReturn(processedExecutionProgress) - .when(oaipmhHarvestPlugin).getExecutionProgress(); - doNothing().when(oaipmhHarvestPlugin) - .cancel(dpsClient, SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name()); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false) - .thenReturn(true); - when(workflowExecutionDao.getById(workflowExecution.getId().toString())) - 
.thenReturn(workflowExecution); - - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - verify(workflowExecutionDao, times(2)).updateMonitorInformation(workflowExecution); - verify(workflowExecutionDao, times(1)).update(workflowExecution); - - verify(oaipmhHarvestPlugin, never()).setFailMessage(anyString()); - } - - @Test - void callExecutionInRUNNINGState() throws ExternalTaskException, UnrecoverableExternalTaskException { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.CURRENTLY_PROCESSING); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(OaipmhHarvestPlugin.class); - oaipmhHarvestPlugin.setPluginStatus(PluginStatus.FINISHED); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - oaipmhHarvestPlugin.setStartedDate(new Date()); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setStartedDate(oaipmhHarvestPlugin.getStartedDate()); - - when(oaipmhHarvestPlugin.getPluginMetadata()).thenReturn(oaipmhHarvestPluginMetadata); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - when(oaipmhHarvestPlugin.getExecutionProgress()) - 
.thenReturn(currentlyProcessingExecutionProgress); - - doNothing().when(workflowExecutionDao).updateMonitorInformation(workflowExecution); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(false); - when(oaipmhHarvestPlugin.monitor(dpsClient)) - .thenReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .thenReturn(new MonitorResult(TaskState.PROCESSED, null)); - doNothing().when(workflowExecutionDao).updateWorkflowPlugins(workflowExecution); - when(workflowExecutionDao.update(workflowExecution)) - .thenReturn(workflowExecution.getId().toString()); - when(workflowExecutionDao.getById(anyString())).thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - assertEquals(WorkflowStatus.FINISHED, workflowExecution.getWorkflowStatus()); - assertNotNull(workflowExecution.getStartedDate()); - assertNotNull(workflowExecution.getUpdatedDate()); - assertNotNull(workflowExecution.getFinishedDate()); - assertNotNull(workflowExecution.getMetisPlugins().get(0).getFinishedDate()); - } - - @Test - void callCancellingStateINQUEUE() throws ExternalTaskException, UnrecoverableExternalTaskException { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.DROPPED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(new OaipmhHarvestPlugin()); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - final ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(objectId); - 
workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setCancelledBy(SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name()); - - when(workflowExecutionMonitor.claimExecution(workflowExecution.getId().toString())) - .thenReturn(new ImmutablePair<>(workflowExecution, true)); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(true); - doNothing().when(oaipmhHarvestPlugin) - .cancel(dpsClient, SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name()); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - when(workflowExecutionDao.getById(workflowExecution.getId().toString())) - .thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - ArgumentCaptor workflowExecutionArgumentCaptor = ArgumentCaptor - .forClass(WorkflowExecution.class); - verify(workflowExecutionDao, times(1)).update(workflowExecutionArgumentCaptor.capture()); - assertEquals(WorkflowStatus.CANCELLED, - workflowExecutionArgumentCaptor.getValue().getWorkflowStatus()); - assertEquals(SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name(), - workflowExecutionArgumentCaptor.getValue().getCancelledBy()); - } - - @Test - void callCancellingStateRUNNING() throws ExternalTaskException, UnrecoverableExternalTaskException { - ExecutionProgress currentlyProcessingExecutionProgress = new ExecutionProgress(); - currentlyProcessingExecutionProgress.setStatus(TaskState.DROPPED); - - OaipmhHarvestPlugin oaipmhHarvestPlugin = Mockito.spy(new OaipmhHarvestPlugin()); - oaipmhHarvestPlugin.setPluginStatus(PluginStatus.RUNNING); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPlugin.setPluginMetadata(oaipmhHarvestPluginMetadata); - ArrayList abstractMetisPlugins = new ArrayList<>(); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - final 
ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - workflowExecution.setId(objectId); - workflowExecution.setMetisPlugins(abstractMetisPlugins); - workflowExecution.setWorkflowStatus(WorkflowStatus.RUNNING); - workflowExecution.setCancelledBy(SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name()); - - when(workflowExecutionMonitor.claimExecution(workflowExecution.getId().toString())) - .thenReturn(new ImmutablePair<>(workflowExecution, true)); - when(workflowExecutionDao.isCancelling(workflowExecution.getId())).thenReturn(true); - doNothing().when(oaipmhHarvestPlugin) - .cancel(dpsClient, SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name()); - doReturn(new MonitorResult(currentlyProcessingExecutionProgress.getStatus(), null)) - .when(oaipmhHarvestPlugin).monitor(dpsClient); - when(workflowExecutionDao.getById(workflowExecution.getId().toString())) - .thenReturn(workflowExecution); - - WorkflowExecutor workflowExecutor = new WorkflowExecutor(workflowExecution, persistenceProvider, - workflowExecutionSettings); - workflowExecutor.call(); - - ArgumentCaptor workflowExecutionArgumentCaptor = ArgumentCaptor - .forClass(WorkflowExecution.class); - verify(workflowExecutionDao, times(1)).update(workflowExecutionArgumentCaptor.capture()); - assertEquals(WorkflowStatus.CANCELLED, - workflowExecutionArgumentCaptor.getValue().getWorkflowStatus()); - assertEquals(SystemId.SYSTEM_MINUTE_CAP_EXPIRE.name(), - workflowExecutionArgumentCaptor.getValue().getCancelledBy()); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutorManager.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutorManager.java deleted file mode 100644 index 0ebd98309f..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/execution/TestWorkflowExecutorManager.java +++ /dev/null @@ -1,90 +0,0 @@ -package 
eu.europeana.metis.core.execution; - -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.rabbitmq.client.AMQP; -import com.rabbitmq.client.Channel; -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.redisson.api.RedissonClient; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-17 - */ -class TestWorkflowExecutorManager { - - private static WorkflowExecutionDao workflowExecutionDao; - private static WorkflowPostProcessor workflowPostProcessor; - private static RedissonClient redissonClient; - private static Channel rabbitmqPublisherChannel; - private static Channel rabbitmqConsumerChannel; - private static WorkflowExecutorManager workflowExecutorManager; - - @BeforeAll - static void prepare() { - SemaphoresPerPluginManager semaphoresPerPluginManager = Mockito - .mock(SemaphoresPerPluginManager.class); - workflowExecutionDao = Mockito.mock(WorkflowExecutionDao.class); - workflowPostProcessor = Mockito.mock(WorkflowPostProcessor.class); - redissonClient = Mockito.mock(RedissonClient.class); - rabbitmqPublisherChannel = Mockito.mock(Channel.class); - rabbitmqConsumerChannel = Mockito.mock(Channel.class); - DpsClient dpsClient = Mockito.mock(DpsClient.class); - workflowExecutorManager = new 
WorkflowExecutorManager(semaphoresPerPluginManager, - workflowExecutionDao, workflowPostProcessor, rabbitmqPublisherChannel, - rabbitmqConsumerChannel, redissonClient, dpsClient); - workflowExecutorManager.setRabbitmqQueueName("ExampleQueueName"); - workflowExecutorManager.setDpsMonitorCheckIntervalInSecs(5); - workflowExecutorManager.setEcloudBaseUrl("http://universe.space"); - workflowExecutorManager.setEcloudProvider("providerExample"); - assertEquals(5, workflowExecutorManager.getDpsMonitorCheckIntervalInSecs()); - } - - @AfterEach - void cleanUp() { - Mockito.reset(workflowExecutionDao); - Mockito.reset(workflowPostProcessor); - Mockito.reset(redissonClient); - Mockito.reset(rabbitmqPublisherChannel); - Mockito.reset(rabbitmqConsumerChannel); - } - - @Test - void addUserWorkflowExecutionToQueue() throws Exception { - String objectId = new ObjectId().toString(); - int priority = 0; - workflowExecutorManager.addWorkflowExecutionToQueue(objectId, priority); - ArgumentCaptor byteArrayArgumentCaptor = ArgumentCaptor.forClass(byte[].class); - verify(rabbitmqPublisherChannel, times(1)) - .basicPublish(anyString(), anyString(), any(AMQP.BasicProperties.class), - byteArrayArgumentCaptor.capture()); - assertArrayEquals(objectId.getBytes(StandardCharsets.UTF_8), - byteArrayArgumentCaptor.getValue()); - } - - @Test - void addUserWorkflowExecutionToQueueThrowsIOException() throws Exception { - String objectId = new ObjectId().toString(); - int priority = 0; - doThrow(new IOException("Some Error")).when(rabbitmqPublisherChannel) - .basicPublish(anyString(), anyString(), any(AMQP.BasicProperties.class), - any(byte[].class)); - assertDoesNotThrow(() -> workflowExecutorManager.addWorkflowExecutionToQueue(objectId, priority)); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestAuthorizer.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestAuthorizer.java deleted file mode 100644 index 
b8268b3329..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestAuthorizer.java +++ /dev/null @@ -1,184 +0,0 @@ -package eu.europeana.metis.core.service; - -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; - -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.exception.UserUnauthorizedException; -import java.util.function.Function; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.BeforeEach; - -class TestAuthorizer { - - private DatasetDao datasetDao; - private Authorizer authorizer; - - @BeforeEach - void prepare() { - datasetDao = mock(DatasetDao.class); - authorizer = new Authorizer(datasetDao); - } - - private Dataset createAndRegisterDataset() { - final Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - doReturn(dataset).when(datasetDao).getDatasetByDatasetId(dataset.getDatasetId()); - doReturn(dataset).when(datasetDao).getDatasetByDatasetName(dataset.getDatasetName()); - return dataset; - } - - private static MetisUserView createUser(AccountRole accountRole) { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - doReturn(accountRole).when(metisUserView).getAccountRole(); - return metisUserView; - } - - private static MetisUserView createUserForDataset(AccountRole accountRole, Dataset dataset) { - final MetisUserView metisUserView = createUser(accountRole); - doReturn(dataset.getOrganizationId()).when(metisUserView).getOrganizationId(); - return metisUserView; 
- } - - private static MetisUserView createUserNotForDataset(AccountRole accountRole, Dataset dataset) { - final MetisUserView metisUserView = createUser(accountRole); - doReturn("not_" + dataset.getOrganizationId()).when(metisUserView).getOrganizationId(); - return metisUserView; - } - - private static void expectUnauthorizedException(TestAction action) - throws NoDatasetFoundException { - try { - action.test(); - fail(""); - } catch (UserUnauthorizedException e) { - // Success - } - } - - private static void expectNoDatasetFoundException(TestAction action) - throws UserUnauthorizedException { - try { - action.test(); - fail(""); - } catch (NoDatasetFoundException e) { - // Success - } - } - - @Test - void testGetAllDatasets() throws UserUnauthorizedException, NoDatasetFoundException { - authorizer.authorizeReadAllDatasets(createUser(AccountRole.METIS_ADMIN)); - authorizer.authorizeReadAllDatasets(createUser(AccountRole.EUROPEANA_DATA_OFFICER)); - expectUnauthorizedException(() -> authorizer.authorizeReadAllDatasets(createUser(AccountRole.PROVIDER_VIEWER))); - expectUnauthorizedException(() -> authorizer.authorizeReadAllDatasets(createUser(null))); - expectUnauthorizedException(() -> authorizer.authorizeReadAllDatasets(null)); - } - - @Test - void testCreatingDefaultXslt() throws UserUnauthorizedException, NoDatasetFoundException { - authorizer.authorizeWriteDefaultXslt(createUser(AccountRole.METIS_ADMIN)); - expectUnauthorizedException(() -> authorizer.authorizeWriteDefaultXslt(createUser(AccountRole.EUROPEANA_DATA_OFFICER))); - expectUnauthorizedException(() -> authorizer.authorizeWriteDefaultXslt(createUser(AccountRole.PROVIDER_VIEWER))); - expectUnauthorizedException(() -> authorizer.authorizeWriteDefaultXslt(createUser(null))); - expectUnauthorizedException(() -> authorizer.authorizeWriteDefaultXslt(null)); - } - - @Test - void testCreatingNewDataset() throws UserUnauthorizedException, NoDatasetFoundException { - 
authorizer.authorizeWriteNewDataset(createUser(AccountRole.METIS_ADMIN)); - authorizer.authorizeWriteNewDataset(createUser(AccountRole.EUROPEANA_DATA_OFFICER)); - expectUnauthorizedException(() -> authorizer.authorizeWriteNewDataset(createUser(AccountRole.PROVIDER_VIEWER))); - expectUnauthorizedException(() -> authorizer.authorizeWriteNewDataset(createUser(null))); - expectUnauthorizedException(() -> authorizer.authorizeWriteNewDataset(null)); - } - - @Test - void testExistingDataset() throws UserUnauthorizedException, NoDatasetFoundException { - testExistingDataset(authorizer::authorizeWriteExistingDatasetById, Dataset::getDatasetId, - false); - testExistingDataset(authorizer::authorizeReadExistingDatasetByName, Dataset::getDatasetName, - true); - testExistingDataset(authorizer::authorizeReadExistingDatasetById, Dataset::getDatasetId, true); - } - - private void testExistingDataset(ExistingDatasetAuthorizer authorizeAction, - Function getDatasetProperty, boolean allowRead) - throws UserUnauthorizedException, NoDatasetFoundException { - - // Create dataset - final Dataset dataset = createAndRegisterDataset(); - - // Test successful authentications - final Dataset result1 = authorizeAction.authorize( - createUserForDataset(AccountRole.METIS_ADMIN, dataset), getDatasetProperty.apply(dataset)); - assertSame(dataset, result1); - final Dataset result2 = - authorizeAction.authorize(createUserNotForDataset(AccountRole.METIS_ADMIN, dataset), - getDatasetProperty.apply(dataset)); - assertSame(dataset, result2); - final Dataset result3 = - authorizeAction.authorize(createUserForDataset(AccountRole.EUROPEANA_DATA_OFFICER, dataset), - getDatasetProperty.apply(dataset)); - assertSame(dataset, result3); - if (allowRead) { - final Dataset result4 = - authorizeAction.authorize(createUserForDataset(AccountRole.PROVIDER_VIEWER, dataset), - getDatasetProperty.apply(dataset)); - assertSame(dataset, result4); - } - - // Test unsuccesful authentications - expectUnauthorizedException(() 
-> authorizeAction.authorize( - createUserNotForDataset(AccountRole.EUROPEANA_DATA_OFFICER, dataset), - getDatasetProperty.apply(dataset))); - if (!allowRead) { - expectUnauthorizedException(() -> authorizeAction.authorize(createUserForDataset(AccountRole.PROVIDER_VIEWER, dataset), - getDatasetProperty.apply(dataset))); - } - expectUnauthorizedException(() -> authorizeAction.authorize(createUserNotForDataset(AccountRole.PROVIDER_VIEWER, dataset), - getDatasetProperty.apply(dataset))); - expectUnauthorizedException(() -> authorizeAction.authorize(createUserForDataset(null, dataset), - getDatasetProperty.apply(dataset))); - expectUnauthorizedException(() -> authorizeAction.authorize(createUserNotForDataset(null, dataset), - getDatasetProperty.apply(dataset))); - expectUnauthorizedException(() -> authorizeAction.authorize(null, getDatasetProperty.apply(dataset))); - } - - @Test - void testNonExistingDatasetForId() - throws UserUnauthorizedException, NoDatasetFoundException { - testNonExistingDataset(authorizer::authorizeWriteExistingDatasetById, "", false); - testNonExistingDataset(authorizer::authorizeReadExistingDatasetById, "", true); - testNonExistingDataset(authorizer::authorizeReadExistingDatasetByName, "", true); - } - - private void testNonExistingDataset(ExistingDatasetAuthorizer authorizeAction, - T nonExistingValue, boolean allowRead) - throws UserUnauthorizedException, NoDatasetFoundException { - expectNoDatasetFoundException(() -> authorizeAction.authorize(createUser(AccountRole.METIS_ADMIN), nonExistingValue)); - expectNoDatasetFoundException(() -> authorizeAction.authorize(createUser(AccountRole.EUROPEANA_DATA_OFFICER), nonExistingValue)); - if (allowRead) { - expectNoDatasetFoundException(() -> authorizeAction.authorize(createUser(AccountRole.PROVIDER_VIEWER), nonExistingValue)); - } else { - expectUnauthorizedException(() -> authorizeAction.authorize(createUser(AccountRole.PROVIDER_VIEWER), nonExistingValue)); - } - expectUnauthorizedException(() -> 
authorizeAction.authorize(createUser(null), nonExistingValue)); - expectUnauthorizedException(() -> authorizeAction.authorize(null, nonExistingValue)); - } - - private interface TestAction { - void test() throws UserUnauthorizedException, NoDatasetFoundException; - } - - private interface ExistingDatasetAuthorizer { - Dataset authorize(MetisUserView metisUserView, T property) - throws UserUnauthorizedException, NoDatasetFoundException; - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDatasetService.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDatasetService.java deleted file mode 100644 index 96b0584ed9..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDatasetService.java +++ /dev/null @@ -1,801 +0,0 @@ -package eu.europeana.metis.core.service; - -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static 
org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import com.github.tomakehurst.wiremock.WireMockServer; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetSearchView; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.exceptions.DatasetAlreadyExistsException; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoXsltFoundException; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.network.NetworkUtil; -import eu.europeana.metis.utils.RestEndpoints; -import java.io.IOException; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.w3c.dom.Document; -import org.xml.sax.InputSource; - -class TestDatasetService { - - private static int portForWireMock = 9999; - - static { - try { - portForWireMock = new 
NetworkUtil().getAvailableLocalPort(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - private static WireMockServer wireMockServer; - - private DatasetDao datasetDao; - private DatasetXsltDao datasetXsltDao; - private WorkflowExecutionDao workflowExecutionDao; - private ScheduledWorkflowDao scheduledWorkflowDao; - private DatasetService datasetService; - private RedissonClient redissonClient; - private Authorizer authorizer; - - private static final String DATASET_CREATION_LOCK = "datasetCreationLock"; - - @BeforeAll - static void setUp() { - wireMockServer = new WireMockServer(wireMockConfig().port(portForWireMock)); - wireMockServer.start(); - } - - @AfterAll - static void destroy() { - wireMockServer.stop(); - } - - private static void expectException(Class exceptionType, - TestAction action) throws GenericMetisException { - try { - action.test(); - fail(""); - } catch (GenericMetisException e) { - if (!e.getClass().equals(exceptionType)) { - throw e; - } - } - } - - private interface TestAction { - - void test() throws GenericMetisException; - } - - @BeforeEach - void prepare() { - datasetDao = mock(DatasetDao.class); - datasetXsltDao = mock(DatasetXsltDao.class); - WorkflowDao workflowDao = mock(WorkflowDao.class); - workflowExecutionDao = mock(WorkflowExecutionDao.class); - scheduledWorkflowDao = mock(ScheduledWorkflowDao.class); - redissonClient = mock(RedissonClient.class); - authorizer = mock(Authorizer.class); - - datasetService = new DatasetService(datasetDao, datasetXsltDao, workflowDao, - workflowExecutionDao, scheduledWorkflowDao, redissonClient, authorizer); - datasetService.setMetisCoreUrl(String.format("http://localhost:%d", portForWireMock)); - } - - @Test - void testCreateDataset() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - RLock rlock = mock(RLock.class); - 
when(redissonClient.getFairLock(DATASET_CREATION_LOCK)).thenReturn(rlock); - when(datasetDao.getDatasetByOrganizationIdAndDatasetName(dataset.getOrganizationId(), - dataset.getDatasetName())).thenReturn(null); - when(datasetDao.findNextInSequenceDatasetId()).thenReturn(1); - datasetService.createDataset(metisUserView, dataset); - ArgumentCaptor datasetArgumentCaptor = ArgumentCaptor.forClass(Dataset.class); - verify(datasetDao, times(1)). - create(datasetArgumentCaptor.capture()); - verify(datasetDao, times(1)).create(any(Dataset.class)); - assertEquals(dataset.getDatasetName(), datasetArgumentCaptor.getValue().getDatasetName()); - assertEquals(metisUserView.getUserId(), datasetArgumentCaptor.getValue().getCreatedByUserId()); - assertEquals(metisUserView.getOrganizationId(), - datasetArgumentCaptor.getValue().getOrganizationId()); - assertEquals(metisUserView.getOrganizationName(), - datasetArgumentCaptor.getValue().getOrganizationName()); - verify(authorizer, times(1)).authorizeWriteNewDataset(metisUserView); - verifyNoMoreInteractions(authorizer); - } - - @Test - void testCreateDatasetUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeWriteNewDataset( - metisUserView); - expectException(UserUnauthorizedException.class, - () -> datasetService.createDataset(metisUserView, dataset)); - verify(datasetDao, times(0)).create(dataset); - } - - @Test - void testCreateDatasetAlreadyExists() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(DATASET_CREATION_LOCK)).thenReturn(rlock); - 
when(datasetDao.getDatasetByOrganizationIdAndDatasetName(metisUserView.getOrganizationId(), - dataset.getDatasetName())).thenReturn(dataset); - expectException(DatasetAlreadyExistsException.class, - () -> datasetService.createDataset(metisUserView, dataset)); - verify(datasetDao, times(0)).create(any(Dataset.class)); - verify(datasetDao, times(0)).getById(null); - } - - @Test - void testUpdateDataset() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setProvider("newProvider"); - Dataset storedDataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - storedDataset.setUpdatedDate(new Date(-1000)); - storedDataset.setOrganizationId(metisUserView.getOrganizationId()); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(storedDataset); - when(datasetXsltDao.create(any(DatasetXslt.class))).thenReturn(TestObjectFactory.DATASET_XSLT); - datasetService.updateDataset(metisUserView, dataset, - TestObjectFactory.createXslt(TestObjectFactory.createDataset(dataset.getDatasetName())) - .getXslt()); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - ArgumentCaptor dataSetArgumentCaptor = ArgumentCaptor.forClass(Dataset.class); - verify(datasetDao, times(1)).update(dataSetArgumentCaptor.capture()); - assertEquals(dataset.getProvider(), dataSetArgumentCaptor.getValue().getProvider()); - assertEquals(dataset.getUpdatedDate(), dataSetArgumentCaptor.getValue().getUpdatedDate()); - assertEquals(storedDataset.getCreatedByUserId(), - dataSetArgumentCaptor.getValue().getCreatedByUserId()); - assertNotEquals(storedDataset.getUpdatedDate(), - 
dataSetArgumentCaptor.getValue().getUpdatedDate()); - - } - - @Test - void testUpdateDatasetNonNullXslt() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setProvider("newProvider"); - Dataset storedDataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - storedDataset.setUpdatedDate(new Date(-1000)); - storedDataset.setOrganizationId(metisUserView.getOrganizationId()); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(storedDataset); - when(datasetXsltDao.create(any(DatasetXslt.class))).thenReturn(TestObjectFactory.DATASET_XSLT); - datasetService.updateDataset(metisUserView, dataset, null); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - ArgumentCaptor dataSetArgumentCaptor = ArgumentCaptor.forClass(Dataset.class); - verify(datasetDao, times(1)).update(dataSetArgumentCaptor.capture()); - assertEquals(dataset.getProvider(), dataSetArgumentCaptor.getValue().getProvider()); - assertEquals(dataset.getUpdatedDate(), dataSetArgumentCaptor.getValue().getUpdatedDate()); - assertEquals(storedDataset.getCreatedByUserId(), - dataSetArgumentCaptor.getValue().getCreatedByUserId()); - assertNotEquals(storedDataset.getUpdatedDate(), - dataSetArgumentCaptor.getValue().getUpdatedDate()); - } - - @Test - void testUpdateDatasetUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeWriteExistingDatasetById(metisUserView, 
dataset.getDatasetId()); - expectException(UserUnauthorizedException.class, - () -> datasetService.updateDataset(metisUserView, dataset, null)); - verify(datasetDao, times(0)).update(dataset); - } - - @Test - void testUpdateDatasetDatasetAlreadyExistsException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - Dataset storedDataset = TestObjectFactory - .createDataset(String.format("%s%s", TestObjectFactory.DATASETNAME, 10)); - storedDataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(storedDataset); - when(datasetDao.getDatasetByOrganizationIdAndDatasetName(dataset.getOrganizationId(), - dataset.getDatasetName())).thenReturn(new Dataset()); - assertThrows(DatasetAlreadyExistsException.class, - () -> datasetService.updateDataset(metisUserView, dataset, null)); - } - - @Test - void testUpdateDatasetDatasetExecutionIsActive() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn("ObjectId"); - assertThrows(BadContentException.class, - () -> datasetService.updateDataset(metisUserView, dataset, null)); - } - - @Test - void testUpdateDatasetNoDatasetFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = 
TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - assertThrows(UserUnauthorizedException.class, - () -> datasetService.updateDataset(metisUserView, dataset, null)); - } - - @Test - void testDeleteDatasetByDatasetId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowExecutionDao.existsAndNotCompleted(Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn(null); - datasetService - .deleteDatasetByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID)); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - verify(datasetDao, times(1)).deleteByDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - verify(workflowExecutionDao, times(1)) - .deleteAllByDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - verify(scheduledWorkflowDao, times(1)) - .deleteAllByDatasetId(Integer.toString(TestObjectFactory.DATASETID)); - } - - @Test - void testDeleteDatasetByDatasetIdUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeWriteExistingDatasetById(metisUserView, datasetId); - expectException(UserUnauthorizedException.class, - () -> datasetService.deleteDatasetByDatasetId(metisUserView, datasetId)); - verify(datasetDao, 
times(0)).deleteByDatasetId(datasetId); - } - - @Test - void testDeleteDatasetByDatasetIdNoDatasetFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(NoDatasetFoundException.class).when(authorizer) - .authorizeWriteExistingDatasetById(metisUserView, datasetId); - expectException(NoDatasetFoundException.class, - () -> datasetService.deleteDatasetByDatasetId(metisUserView, datasetId)); - verify(datasetDao, times(0)).deleteByDatasetId(datasetId); - } - - @Test - void testDeleteDatasetDatasetExecutionIsActive() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowExecutionDao.existsAndNotCompleted(Integer.toString(TestObjectFactory.DATASETID))) - .thenReturn("ObjectId"); - assertThrows(BadContentException.class, () -> datasetService - .deleteDatasetByDatasetId(metisUserView, Integer.toString(TestObjectFactory.DATASETID))); - } - - @Test - void testGetDatasetByDatasetName() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeReadExistingDatasetByName(metisUserView, TestObjectFactory.DATASETNAME)) - .thenReturn(dataset); - Dataset returnedDataset = datasetService - .getDatasetByDatasetName(metisUserView, TestObjectFactory.DATASETNAME); - assertNotNull(returnedDataset); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetByName(metisUserView, 
TestObjectFactory.DATASETNAME); - verifyNoMoreInteractions(authorizer); - } - - @Test - void testGetDatasetByDatasetNameUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeReadExistingDatasetByName(metisUserView, TestObjectFactory.DATASETNAME); - expectException(UserUnauthorizedException.class, - () -> datasetService.getDatasetByDatasetName(metisUserView, TestObjectFactory.DATASETNAME)); - verify(datasetDao, times(0)).getDatasetByDatasetName(TestObjectFactory.DATASETNAME); - } - - @Test - void testGetDatasetByDatasetNameNoDatasetFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - doThrow(NoDatasetFoundException.class).when(authorizer) - .authorizeReadExistingDatasetByName(metisUserView, TestObjectFactory.DATASETNAME); - assertThrows(NoDatasetFoundException.class, - () -> datasetService.getDatasetByDatasetName(metisUserView, TestObjectFactory.DATASETNAME)); - } - - @Test - void testGetDatasetByDatasetId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - Dataset returnedDataset = - datasetService.getDatasetByDatasetId(metisUserView, dataset.getDatasetId()); - assertNotNull(returnedDataset); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - } - - @Test - void testGetDatasetByDatasetIdUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String 
datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, datasetId); - expectException(UserUnauthorizedException.class, - () -> datasetService.getDatasetByDatasetId(metisUserView, datasetId)); - verify(datasetDao, times(0)).getDatasetByDatasetId(datasetId); - } - - @Test - void testGetDatasetByDatasetIdNoDatasetFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(NoDatasetFoundException.class).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, datasetId); - assertThrows(NoDatasetFoundException.class, - () -> datasetService.getDatasetByDatasetId(metisUserView, datasetId)); - } - - @Test - void getDatasetXsltByDatasetId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - dataset.setXsltId(new ObjectId()); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - when(datasetXsltDao.getById(dataset.getXsltId().toString())).thenReturn(datasetXslt); - - DatasetXslt datasetXsltByDatasetId = datasetService - .getDatasetXsltByDatasetId(metisUserView, dataset.getDatasetId()); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - assertEquals(datasetXslt.getXslt(), datasetXsltByDatasetId.getXslt()); - assertEquals(datasetXslt.getDatasetId(), datasetXsltByDatasetId.getDatasetId()); - } - - @Test - void getDatasetXsltByDatasetIdUnauthorized() throws Exception { - 
MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(UserUnauthorizedException.class).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, datasetId); - assertThrows(UserUnauthorizedException.class, - () -> datasetService.getDatasetXsltByDatasetId(metisUserView, datasetId)); - } - - @Test - void getDatasetXsltByDatasetIdNoXsltFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(datasetXsltDao.getById(anyString())).thenReturn(null); - assertThrows(NoXsltFoundException.class, - () -> datasetService.getDatasetXsltByDatasetId(metisUserView, dataset.getDatasetId())); - } - - @Test - void getDatasetXsltByDatasetIdNoDatasetFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - doThrow(NoDatasetFoundException.class).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, datasetId); - assertThrows(NoDatasetFoundException.class, - () -> datasetService.getDatasetXsltByDatasetId(metisUserView, datasetId)); - } - - @Test - void getDatasetXsltByXsltId() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - when(datasetXsltDao.getById(TestObjectFactory.DATASET_XSLT.getId().toString())).thenReturn(datasetXslt); - - DatasetXslt datasetXsltByDatasetId = datasetService - 
.getDatasetXsltByXsltId(TestObjectFactory.DATASET_XSLT.getId().toString()); - assertEquals(datasetXslt.getXslt(), datasetXsltByDatasetId.getXslt()); - assertEquals(datasetXslt.getDatasetId(), datasetXsltByDatasetId.getDatasetId()); - } - - @Test - void getDatasetXsltByXsltIdNoXsltFoundException() { - when(datasetXsltDao.getById(TestObjectFactory.DATASET_XSLT.getId().toString())).thenReturn(null); - assertThrows(NoXsltFoundException.class, - () -> datasetService.getDatasetXsltByXsltId(TestObjectFactory.DATASET_XSLT.getId().toString())); - } - - @Test - void createDefaultXslt() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - DatasetXslt datasetXslt = TestObjectFactory - .createXslt(TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME)); - datasetXslt.setDatasetId("-1"); - when(datasetXsltDao.create(any(DatasetXslt.class))).thenReturn(datasetXslt); - DatasetXslt defaultDatasetXslt = datasetService - .createDefaultXslt(metisUserView, datasetXslt.getXslt()); - assertEquals(datasetXslt.getDatasetId(), defaultDatasetXslt.getDatasetId()); - verify(authorizer, times(1)).authorizeWriteDefaultXslt(metisUserView); - verifyNoMoreInteractions(authorizer); - } - - @Test - void createDefaultXsltUserUnauthorizedException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - DatasetXslt datasetXslt = TestObjectFactory - .createXslt(TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME)); - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeWriteDefaultXslt( - metisUserView); - assertThrows(UserUnauthorizedException.class, - () -> datasetService.createDefaultXslt(metisUserView, datasetXslt.getXslt())); - } - - @Test - void getLatestDefaultXslt() throws Exception { - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - 
when(datasetXsltDao.getLatestDefaultXslt()).thenReturn(datasetXslt); - - DatasetXslt datasetXsltByDatasetId = datasetService.getLatestDefaultXslt(); - assertEquals(datasetXslt.getXslt(), datasetXsltByDatasetId.getXslt()); - assertEquals(datasetXslt.getDatasetId(), datasetXsltByDatasetId.getDatasetId()); - } - - @Test - void getLatestDefaultXsltNoXsltFoundException() { - when(datasetXsltDao.create(TestObjectFactory.DATASET_XSLT)).thenReturn(null); - assertThrows(NoXsltFoundException.class, () -> datasetService.getLatestDefaultXslt()); - } - - @Test - void transformRecordsUsingLatestDefaultXslt() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - when(datasetXsltDao.getLatestDefaultXslt()).thenReturn(datasetXslt); - List listOfRecords = TestObjectFactory.createListOfRecords(5); - listOfRecords.getFirst().setXmlRecord("invalid xml"); - - String xsltUrl = RestEndpoints - .resolve(RestEndpoints.DATASETS_XSLT_XSLTID, - Collections.singletonList(datasetXslt.getId().toString())); - wireMockServer.stubFor(get(urlEqualTo(xsltUrl)) - .willReturn(aResponse() - .withStatus(200) - .withHeader("Content-Type", "text/plain") - .withBody(datasetXslt.getXslt()))); - - List records = datasetService - .transformRecordsUsingLatestDefaultXslt(metisUserView, dataset.getDatasetId(), listOfRecords); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc; - 
assertFalse(records.getFirst().getXmlRecord().contains("edm:ProvidedCHO")); //First record is invalid - for (int i = 1; i < records.size(); i++) { - doc = dBuilder.parse(new InputSource(new StringReader(records.get(i).getXmlRecord()))); - assertEquals(1, doc.getElementsByTagName("edm:ProvidedCHO").getLength()); - assertTrue(doc.getElementsByTagName("edm:ProvidedCHO").item(0).getAttributes() - .getNamedItem("rdf:about").getTextContent().contains(Integer.toString(i))); - } - } - - @Test - void transformRecordsUsingLatestDefaultXslt_NoXsltFoundException() { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - when(datasetXsltDao.getLatestDefaultXslt()).thenReturn(null); - List listOfRecords = TestObjectFactory.createListOfRecords(1); - assertThrows(NoXsltFoundException.class, () -> datasetService - .transformRecordsUsingLatestDefaultXslt(metisUserView, dataset.getDatasetId(), listOfRecords)); - } - - @Test - void transformRecordsUsingLatestDatasetXslt() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - dataset.setXsltId(new ObjectId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - when(datasetXsltDao.getById(dataset.getXsltId().toString())).thenReturn(datasetXslt); - List listOfRecords = TestObjectFactory.createListOfRecords(5); - - String xsltUrl = RestEndpoints - .resolve(RestEndpoints.DATASETS_XSLT_XSLTID, - 
Collections.singletonList(datasetXslt.getId().toString())); - wireMockServer.stubFor(get(urlEqualTo(xsltUrl)) - .willReturn(aResponse() - .withStatus(200) - .withHeader("Content-Type", "text/plain") - .withBody(datasetXslt.getXslt()))); - - List records = datasetService - .transformRecordsUsingLatestDatasetXslt(metisUserView, dataset.getDatasetId(), listOfRecords); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc; - for (int i = 0; i < records.size(); i++) { - doc = dBuilder.parse(new InputSource(new StringReader(records.get(i).getXmlRecord()))); - assertEquals(1, doc.getElementsByTagName("edm:ProvidedCHO").getLength()); - assertTrue(doc.getElementsByTagName("edm:ProvidedCHO").item(0).getAttributes() - .getNamedItem("rdf:about").getTextContent().contains(Integer.toString(i))); - } - } - - @Test - void transformRecordsUsingLatestDatasetXslt_NoXsltFoundException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setOrganizationId(metisUserView.getOrganizationId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - List listOfRecords = TestObjectFactory.createListOfRecords(1); - assertThrows(NoXsltFoundException.class, () -> datasetService - .transformRecordsUsingLatestDatasetXslt(metisUserView, dataset.getDatasetId(), listOfRecords)); - } - - @Test - void testGetAllDatasetsByProvider() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String provider = "myProvider"; - int nextPage = 1; - 
when(datasetDao.getAllDatasetsByProvider(provider, nextPage)).thenReturn(list); - List retList = datasetService.getAllDatasetsByProvider(metisUserView, provider, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertSame(list, retList); - } - - @Test - void testGetAllDatasetsByProviderUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String provider = "myProvider"; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, - () -> datasetService.getAllDatasetsByProvider(metisUserView, provider, nextPage)); - verify(datasetDao, times(0)).getAllDatasetsByProvider(provider, nextPage); - } - - @Test - void testGetAllDatasetsByIntermidiateProvider() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String provider = "myProvider"; - int nextPage = 1; - when(datasetDao.getAllDatasetsByIntermediateProvider(provider, nextPage)).thenReturn(list); - List retList = datasetService - .getAllDatasetsByIntermediateProvider(metisUserView, provider, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertSame(list, retList); - } - - @Test - void testGetAllDatasetsByIntermidiateProviderUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String intermediateProvider = "myProvider"; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, - () -> datasetService.getAllDatasetsByIntermediateProvider(metisUserView, intermediateProvider, - nextPage)); - 
verify(datasetDao, times(0)).getAllDatasetsByIntermediateProvider(intermediateProvider, - nextPage); - } - - @Test - void testGetAllDatasetsByDataProvider() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String provider = "myProvider"; - int nextPage = 1; - when(datasetDao.getAllDatasetsByDataProvider(provider, nextPage)).thenReturn(list); - List retList = datasetService - .getAllDatasetsByDataProvider(metisUserView, provider, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertSame(list, retList); - } - - @Test - void testGetAllDatasetsByDataProviderUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String dataProvider = "myProvider"; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, - () -> datasetService.getAllDatasetsByDataProvider(metisUserView, dataProvider, nextPage)); - verify(datasetDao, times(0)).getAllDatasetsByDataProvider(dataProvider, nextPage); - } - - @Test - void testGetAllDatasetsByOrganizationId() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String organizationId = "organizationId"; - int nextPage = 1; - when(datasetDao.getAllDatasetsByOrganizationId(organizationId, nextPage)).thenReturn(list); - List retList = datasetService - .getAllDatasetsByOrganizationId(metisUserView, organizationId, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertSame(list, retList); - } - - @Test - void testGetAllDatasetsByOrganizationIdUnauthorized() throws Exception { - MetisUserView metisUserView = 
TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String organizationId = "organizationId"; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, - () -> datasetService.getAllDatasetsByOrganizationId(metisUserView, organizationId, nextPage)); - verify(datasetDao, times(0)).getAllDatasetsByOrganizationId(organizationId, nextPage); - } - - @Test - void testGetAllDatasetsByOrganizationName() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String organizationName = "organizationName"; - int nextPage = 1; - when(datasetDao.getAllDatasetsByOrganizationName(organizationName, nextPage)) - .thenReturn(list); - List retList = datasetService - .getAllDatasetsByOrganizationName(metisUserView, organizationName, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertSame(list, retList); - } - - @Test - void testGetAllDatasetsByOrganizationNameUnauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String organizationName = "organizationName"; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, () -> datasetService - .getAllDatasetsByOrganizationName(metisUserView, organizationName, nextPage)); - verify(datasetDao, times(0)).getAllDatasetsByOrganizationName(organizationName, nextPage); - } - - @Test - void testSearchDatasetsBasedOnSearchString() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List list = new ArrayList<>(); - String searchString = "test 0"; - int nextPage = 1; - 
when(datasetDao.searchDatasetsBasedOnSearchString(Collections.singletonList("0"), - Collections.singletonList("0"), nextPage)).thenReturn(list); - List retList = datasetService - .searchDatasetsBasedOnSearchString(metisUserView, searchString, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - assertEquals(list.size(), retList.size()); - } - - @Test - void testSearchDatasetsBasedOnSearchString_BadContentException() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String searchString = ""; - int nextPage = 1; - expectException(BadContentException.class, - () -> datasetService.searchDatasetsBasedOnSearchString(metisUserView, searchString, nextPage)); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - } - - @Test - void testSearchDatasetsBasedOnSearchString_Unauthorized() throws Exception { - MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - String searchString = ""; - int nextPage = 1; - doThrow(UserUnauthorizedException.class).when(authorizer).authorizeReadAllDatasets( - metisUserView); - expectException(UserUnauthorizedException.class, () -> datasetService - .searchDatasetsBasedOnSearchString(metisUserView, searchString, nextPage)); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - } - - @Test - void testGetDatasetsPerRequestLimit() { - when(datasetDao.getDatasetsPerRequest()).thenReturn(5); - assertEquals(5, datasetService.getDatasetsPerRequestLimit()); - } -} - diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDepublishRecordIdService.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDepublishRecordIdService.java deleted file mode 100644 index 6cfdec5875..0000000000 --- 
a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestDepublishRecordIdService.java +++ /dev/null @@ -1,175 +0,0 @@ -package eu.europeana.metis.core.service; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; - -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation.PublicationStatus; -import eu.europeana.metis.core.dataset.DepublishRecordId; -import eu.europeana.metis.core.rest.DepublishRecordIdView; -import eu.europeana.metis.core.util.DepublishRecordIdSortField; -import eu.europeana.metis.core.util.SortDirection; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.exception.GenericMetisException; -import java.util.List; -import java.util.Set; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -public class TestDepublishRecordIdService { - - private static Authorizer authorizer; - private static OrchestratorService orchestratorService; - private static 
DepublishRecordIdDao depublishRecordIdDao; - private static DepublishRecordIdService depublishRecordIdService; - private static MetisUserView metisUserView; - private static String datasetId; - - @BeforeAll - static void setUp() { - - authorizer = mock(Authorizer.class); - orchestratorService = mock(OrchestratorService.class); - depublishRecordIdDao = mock(DepublishRecordIdDao.class); - metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - datasetId = Integer.toString(TestObjectFactory.DATASETID); - - depublishRecordIdService = spy(new DepublishRecordIdService(authorizer, orchestratorService, - depublishRecordIdDao)); - - } - - @BeforeEach - void cleanUp() { - reset(authorizer); - reset(orchestratorService); - reset(depublishRecordIdDao); - reset(depublishRecordIdService); - } - - @Test - void addRecordIdsToBeDepublishedTest() throws GenericMetisException { - depublishRecordIdService.addRecordIdsToBeDepublished(metisUserView, datasetId, "1002"); - - verify(authorizer, times(1)).authorizeWriteExistingDatasetById(metisUserView, datasetId); - verify(depublishRecordIdService, times(1)).checkAndNormalizeRecordIds(any(), any()); - verify(depublishRecordIdDao, times(1)).createRecordIdsToBeDepublished(any(), any()); - verifyNoMoreInteractions(orchestratorService); - - - } - - @Test - void deletePendingRecordIdsTest() throws GenericMetisException { - depublishRecordIdService.deletePendingRecordIds(metisUserView, datasetId, "1002"); - - verify(authorizer, times(1)).authorizeWriteExistingDatasetById(metisUserView, datasetId); - verify(depublishRecordIdService, times(1)).checkAndNormalizeRecordIds(any(), any()); - verify(depublishRecordIdDao, times(1)).deletePendingRecordIds(any(), any()); - verifyNoMoreInteractions(orchestratorService); - - } - - @Test - void getDepublishRecordIdsTest() throws GenericMetisException { - - // Mock the DAO - final DepublishRecordId record = new DepublishRecordId(); - record.setRecordId("RECORD_ID"); - doReturn(List.of(new 
DepublishRecordIdView(record))).when(depublishRecordIdDao) - .getDepublishRecordIds(eq(datasetId), anyInt(), any(), any(), anyString()); - - // Make the actual call - final var result = depublishRecordIdService.getDepublishRecordIds(metisUserView, datasetId, 1, - DepublishRecordIdSortField.RECORD_ID, SortDirection.ASCENDING, "search"); - - // Verify the interactions - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verify(depublishRecordIdDao, times(1)).getDepublishRecordIds(datasetId, - 1, DepublishRecordIdSortField.RECORD_ID, SortDirection.ASCENDING, "search"); - verify(depublishRecordIdDao, times(1)).getDepublishRecordIds(anyString(), - anyInt(), any(), any(), anyString()); - verifyNoMoreInteractions(orchestratorService); - - // verify the result - assertEquals(1, result.getListSize()); - assertEquals(1, result.getResults().size()); - assertEquals(record.getRecordId(), result.getResults().get(0).getRecordId()); - } - - @Test - void createAndAddInQueueDepublishWorkflowExecutionTest() throws GenericMetisException { - //Mock Workflow and Set - String mockRecordIdsSeparateLines = "RECORD_ID"; - final Set mockNormalizedRecordIds = Set.of(mockRecordIdsSeparateLines); - doReturn(mockNormalizedRecordIds).when(depublishRecordIdService).checkAndNormalizeRecordIds(datasetId, - mockRecordIdsSeparateLines); - - //Do the actual call - WorkflowExecution result = depublishRecordIdService - .createAndAddInQueueDepublishWorkflowExecution(metisUserView, datasetId, true, 1, mockRecordIdsSeparateLines); - - //Verify interactions - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verify(orchestratorService, times(1)) - .addWorkflowInQueueOfWorkflowExecutions(any(), anyString(), any(), any(), anyInt()); - - //Verify values - ArgumentCaptor workflowArgumentCaptor = ArgumentCaptor.forClass(Workflow.class); - verify(orchestratorService, times(1)) - .addWorkflowInQueueOfWorkflowExecutions(any(), 
anyString(), workflowArgumentCaptor.capture(), any(), anyInt()); - Workflow sentWorkflow = workflowArgumentCaptor.getValue(); - assertEquals(datasetId, sentWorkflow.getDatasetId()); - } - - @Test - void canTriggerDepublicationResultTrueTest() throws GenericMetisException { - final DatasetExecutionInformation mockExecutionInformation = mock( - DatasetExecutionInformation.class); - - doReturn(mockExecutionInformation).when(orchestratorService) - .getDatasetExecutionInformation(datasetId); - doReturn(PublicationStatus.PUBLISHED).when(mockExecutionInformation).getPublicationStatus(); - doReturn(true).when(mockExecutionInformation).isLastPreviewRecordsReadyForViewing(); - doReturn(true).when(mockExecutionInformation).isLastPublishedRecordsReadyForViewing(); - boolean result = depublishRecordIdService.canTriggerDepublication(metisUserView, datasetId); - - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verify(orchestratorService, times(1)).getRunningOrInQueueExecution(datasetId); - verify(orchestratorService, times(1)).getDatasetExecutionInformation(datasetId); - verify(mockExecutionInformation, times(1)).getPublicationStatus(); - verify(mockExecutionInformation, times(1)).isLastPublishedRecordsReadyForViewing(); - assertTrue(result); - } - - @Test - void canTriggerDepublicationResultFalseTest() throws GenericMetisException { - final WorkflowExecution mockWorkflow = mock(WorkflowExecution.class); - - doReturn(mockWorkflow).when(orchestratorService).getRunningOrInQueueExecution(datasetId); - boolean result = depublishRecordIdService.canTriggerDepublication(metisUserView, datasetId); - - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verify(orchestratorService, times(1)).getRunningOrInQueueExecution(datasetId); - assertFalse(result); - } - -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestOrchestratorService.java 
b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestOrchestratorService.java deleted file mode 100644 index ac3d15405a..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestOrchestratorService.java +++ /dev/null @@ -1,1220 +0,0 @@ -package eu.europeana.metis.core.service; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.DaoFieldNames; -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import 
eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ResultList; -import eu.europeana.metis.core.dao.WorkflowValidationUtils; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.DatasetExecutionInformation; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.PluginExecutionNotAllowed; -import eu.europeana.metis.core.exceptions.WorkflowAlreadyExistsException; -import eu.europeana.metis.core.exceptions.WorkflowExecutionAlreadyExistsException; -import eu.europeana.metis.core.execution.WorkflowExecutorManager; -import eu.europeana.metis.core.rest.ExecutionHistory; -import eu.europeana.metis.core.rest.PluginsWithDataAvailability; -import eu.europeana.metis.core.rest.VersionEvolution; -import eu.europeana.metis.core.rest.VersionEvolution.VersionEvolutionStep; -import eu.europeana.metis.core.rest.execution.overview.DatasetSummaryView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionAndDatasetView; -import eu.europeana.metis.core.rest.execution.overview.ExecutionSummaryView; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.ValidationProperties; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import 
eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPlugin; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import eu.europeana.metis.utils.DateUtils; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.EnumSet; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.bson.types.ObjectId; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; 
-import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.InOrder; -import org.mockito.Mockito; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2017-10-06 - */ -class TestOrchestratorService { - - private static final int SOLR_COMMIT_PERIOD_IN_MINS = 15; - private static WorkflowExecutionDao workflowExecutionDao; - private static DataEvolutionUtils dataEvolutionUtils; - private static RedirectionInferrer redirectionInferrer; - private static WorkflowValidationUtils validationUtils; - private static WorkflowDao workflowDao; - private static DatasetDao datasetDao; - private static DatasetXsltDao datasetXsltDao; - private static DepublishRecordIdDao depublishRecordIdDao; - private static WorkflowExecutorManager workflowExecutorManager; - private static WorkflowExecutionFactory workflowExecutionFactory; - private static OrchestratorService orchestratorService; - private static RedissonClient redissonClient; - private static Authorizer authorizer; - - @BeforeAll - static void prepare() { - workflowExecutionDao = mock(WorkflowExecutionDao.class); - dataEvolutionUtils = mock(DataEvolutionUtils.class); - validationUtils = mock(WorkflowValidationUtils.class); - workflowDao = mock(WorkflowDao.class); - datasetDao = mock(DatasetDao.class); - datasetXsltDao = mock(DatasetXsltDao.class); - depublishRecordIdDao = mock(DepublishRecordIdDao.class); - workflowExecutorManager = mock(WorkflowExecutorManager.class); - redissonClient = mock(RedissonClient.class); - authorizer = mock(Authorizer.class); - - redirectionInferrer = new RedirectionInferrer(workflowExecutionDao, dataEvolutionUtils); - workflowExecutionFactory = spy(new WorkflowExecutionFactory(datasetXsltDao, - depublishRecordIdDao, redirectionInferrer)); - workflowExecutionFactory.setValidationExternalProperties( - new ValidationProperties("url-ext", "schema-ext", "schematron-ext")); - 
workflowExecutionFactory.setValidationInternalProperties( - new ValidationProperties("url-int", "schema-int", "schematron-int")); - - orchestratorService = spy(new OrchestratorService(workflowExecutionFactory, workflowDao, - workflowExecutionDao, validationUtils, dataEvolutionUtils, datasetDao, - workflowExecutorManager, redissonClient, authorizer, depublishRecordIdDao)); - orchestratorService.setSolrCommitPeriodInMins(SOLR_COMMIT_PERIOD_IN_MINS); - } - - @AfterEach - void cleanUp() { - Mockito.reset(workflowExecutionDao); - Mockito.reset(validationUtils); - Mockito.reset(workflowDao); - Mockito.reset(datasetDao); - Mockito.reset(workflowExecutorManager); - Mockito.reset(redissonClient); - Mockito.reset(authorizer); - Mockito.reset(workflowExecutionFactory); - Mockito.reset(orchestratorService); - } - - @Test - void createWorkflow() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - workflow.setDatasetId(dataset.getDatasetId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - orchestratorService.createWorkflow(metisUserView, workflow.getDatasetId(), workflow, null); - - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, workflow.getDatasetId()); - verifyNoMoreInteractions(authorizer); - InOrder inOrder = Mockito.inOrder(workflowDao); - inOrder.verify(workflowDao, times(1)).workflowExistsForDataset(workflow.getDatasetId()); - inOrder.verify(workflowDao, times(1)).create(workflow); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void createWorkflowOrderOfPluginsNotAllowed() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - Dataset dataset = 
TestObjectFactory.createDataset("datasetName"); - workflow.setDatasetId(dataset.getDatasetId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - doThrow(PluginExecutionNotAllowed.class).when(validationUtils) - .validateWorkflowPlugins(workflow, null); - assertThrows(PluginExecutionNotAllowed.class, - () -> orchestratorService - .createWorkflow(metisUserView, workflow.getDatasetId(), workflow, null)); - - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, workflow.getDatasetId()); - verifyNoMoreInteractions(authorizer); - verify(workflowDao, times(1)).workflowExistsForDataset(workflow.getDatasetId()); - verifyNoMoreInteractions(workflowDao); - } - - @Test - void createWorkflow_AlreadyExists() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - workflow.setDatasetId(dataset.getDatasetId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - when(workflowDao.workflowExistsForDataset(workflow.getDatasetId())).thenReturn(true); - - assertThrows(WorkflowAlreadyExistsException.class, - () -> orchestratorService - .createWorkflow(metisUserView, workflow.getDatasetId(), workflow, null)); - - InOrder inOrder = Mockito.inOrder(workflowDao); - inOrder.verify(workflowDao, times(1)).workflowExistsForDataset(workflow.getDatasetId()); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void updateWorkflow() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - workflow.setDatasetId(dataset.getDatasetId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - 
when(workflowDao.getWorkflow(dataset.getDatasetId())).thenReturn(workflow); - orchestratorService.updateWorkflow(metisUserView, workflow.getDatasetId(), workflow, null); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, workflow.getDatasetId()); - verifyNoMoreInteractions(authorizer); - InOrder inOrder = Mockito.inOrder(workflowDao); - inOrder.verify(workflowDao, times(1)).getWorkflow(dataset.getDatasetId()); - inOrder.verify(workflowDao, times(1)).update(workflow); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void updateUserWorkflow_NoUserWorkflowFound() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset("datasetName"); - workflow.setDatasetId(dataset.getDatasetId()); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - assertThrows(NoWorkflowFoundException.class, - () -> orchestratorService - .updateWorkflow(metisUserView, workflow.getDatasetId(), workflow, null)); - InOrder inOrder = Mockito.inOrder(workflowDao); - inOrder.verify(workflowDao, times(1)).getWorkflow(anyString()); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void deleteWorkflow() throws GenericMetisException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - orchestratorService.deleteWorkflow(metisUserView, workflow.getDatasetId()); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, workflow.getDatasetId()); - verifyNoMoreInteractions(authorizer); - ArgumentCaptor workflowDatasetIdArgumentCaptor = ArgumentCaptor - .forClass(String.class); - verify(workflowDao, times(1)).deleteWorkflow(workflowDatasetIdArgumentCaptor.capture()); - assertEquals(workflow.getDatasetId(), - 
workflowDatasetIdArgumentCaptor.getValue()); - } - - @Test - void getWorkflow() throws GenericMetisException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - - Workflow retrievedWorkflow = orchestratorService - .getWorkflow(metisUserView, workflow.getDatasetId()); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflow.getDatasetId()); - verifyNoMoreInteractions(authorizer); - assertSame(workflow, retrievedWorkflow); - } - - @Test - void getWorkflowExecutionByExecutionId() throws GenericMetisException { - - // Create some objects - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String workflowExecutionId = "workflow execution ID"; - final WorkflowExecution workflowExecution = mock(WorkflowExecution.class); - final String datasetId = "dataset ID"; - when(workflowExecution.getDatasetId()).thenReturn(datasetId); - - // Test the happy flow - when(workflowExecutionDao.getById(workflowExecutionId)).thenReturn(workflowExecution); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId)).thenReturn(null); - assertSame(workflowExecution, - orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)); - - // Test when the workflow execution does not exist - when(workflowExecutionDao.getById(workflowExecutionId)).thenReturn(null); - assertNull( - orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)); - when(workflowExecutionDao.getById(workflowExecutionId)).thenReturn(workflowExecution); - assertSame(workflowExecution, - orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)); - - // Test when the user is not allowed - 
when(authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId)) - .thenThrow(new UserUnauthorizedException("")); - assertThrows(UserUnauthorizedException.class, () -> orchestratorService - .getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)); - doReturn(null).when(authorizer).authorizeReadExistingDatasetById(metisUserView, datasetId); - assertSame(workflowExecution, - orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)); - } - - @Test - void getWorkflowExecutionByExecutionId_NonExistingWorkflowExecution() - throws GenericMetisException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String workflowExecutionId = "workflow execution id"; - when(workflowExecutionDao.getById(workflowExecutionId)).thenReturn(null); - orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId); - verifyNoMoreInteractions(authorizer); - InOrder inOrder = Mockito.inOrder(workflowExecutionDao); - inOrder.verify(workflowExecutionDao, times(1)).getById(workflowExecutionId); - inOrder.verifyNoMoreInteractions(); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions() throws Exception { - - // Create the test objects - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(datasetDao.getDatasetByDatasetId(dataset.getDatasetId())).thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(anyString())).thenReturn(rlock); - doNothing().when(rlock).lock(); - 
when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - datasetXslt.setId(TestObjectFactory.DATASET_XSLT.getId()); - when(datasetXsltDao.getLatestDefaultXslt()).thenReturn(datasetXslt); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - doNothing().when(rlock).unlock(); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - - // Add the workflow - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - orchestratorService - .addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(dataset.getDatasetId(), null, - null, - 0); - verifyNoMoreInteractions(authorizer); - - // Verify the validation parameters - final Map pluginsByType = workflow - .getMetisPluginsMetadata().stream().collect(Collectors - .toMap(AbstractExecutablePluginMetadata::getExecutablePluginType, Function.identity(), - (m1, m2) -> m1)); - final ValidationInternalPluginMetadata metadataInternal = - (ValidationInternalPluginMetadata) pluginsByType - .get(ExecutablePluginType.VALIDATION_INTERNAL); - assertEquals(workflowExecutionFactory.getValidationInternalProperties().getUrlOfSchemasZip(), - metadataInternal.getUrlOfSchemasZip()); - assertEquals(workflowExecutionFactory.getValidationInternalProperties().getSchemaRootPath(), - metadataInternal.getSchemaRootPath()); - assertEquals(workflowExecutionFactory.getValidationInternalProperties().getSchematronRootPath(), - metadataInternal.getSchematronRootPath()); - final 
ValidationExternalPluginMetadata metadataExternal = - (ValidationExternalPluginMetadata) pluginsByType - .get(ExecutablePluginType.VALIDATION_EXTERNAL); - assertEquals(workflowExecutionFactory.getValidationExternalProperties().getUrlOfSchemasZip(), - metadataExternal.getUrlOfSchemasZip()); - assertEquals(workflowExecutionFactory.getValidationExternalProperties().getSchemaRootPath(), - metadataExternal.getSchemaRootPath()); - assertEquals(workflowExecutionFactory.getValidationExternalProperties().getSchematronRootPath(), - metadataExternal.getSchematronRootPath()); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_TransformationUsesCustomXslt() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflow.getMetisPluginsMetadata().forEach(abstractMetisPluginMetadata -> { - if (abstractMetisPluginMetadata instanceof TransformationPluginMetadata) { - ((TransformationPluginMetadata) abstractMetisPluginMetadata).setCustomXslt(true); - } - }); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(anyString())).thenReturn(rlock); - doNothing().when(rlock).lock(); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - DatasetXslt datasetXslt = TestObjectFactory.createXslt(dataset); - datasetXslt.setId(TestObjectFactory.DATASET_XSLT.getId()); - dataset.setXsltId(datasetXslt.getId()); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - 
when(datasetXsltDao.getById(dataset.getXsltId().toString())).thenReturn(datasetXslt); - when(workflowExecutionDao.create(any(WorkflowExecution.class))) - .thenReturn(workflowExecutionTest); - doNothing().when(rlock).unlock(); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_AddHTTPHarvest() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - HTTPHarvestPluginMetadata httpHarvestPluginMetadata = new HTTPHarvestPluginMetadata(); - httpHarvestPluginMetadata.setUrl("http://harvest.url.org"); - httpHarvestPluginMetadata.setEnabled(true); - workflow.getMetisPluginsMetadata().set(0, httpHarvestPluginMetadata); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - when(redissonClient.getFairLock(anyString())).thenReturn(Mockito.mock(RLock.class)); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void 
addWorkflowInQueueOfWorkflowExecutions_NoHarvestPlugin() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - workflow.getMetisPluginsMetadata().remove(0); - - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - OaipmhHarvestPlugin oaipmhHarvestPlugin = (OaipmhHarvestPlugin) ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - oaipmhHarvestPlugin.setPluginMetadata(new OaipmhHarvestPluginMetadata()); - oaipmhHarvestPlugin.setStartedDate(new Date()); - ExecutionProgress executionProgress = new ExecutionProgress(); - executionProgress.setProcessedRecords(5); - oaipmhHarvestPlugin.setExecutionProgress(executionProgress); - when(validationUtils.validateWorkflowPlugins(workflow, null)) - .thenReturn(new PluginWithExecutionId<>("execution id", oaipmhHarvestPlugin)); - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(anyString())).thenReturn(rlock); - doNothing().when(rlock).lock(); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - doNothing().when(rlock).unlock(); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void 
addWorkflowInQueueOfWorkflowExecutions_NoHarvestPlugin_NoProcessPlugin() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - when(redissonClient.getFairLock(anyString())).thenReturn(Mockito.mock(RLock.class)); - when(validationUtils.validateWorkflowPlugins(workflow, null)) - .thenThrow(new PluginExecutionNotAllowed("")); - assertThrows(PluginExecutionNotAllowed.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0)); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_EcloudDatasetAlreadyGenerated() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - dataset.setEcloudDatasetId("f525f64c-fea0-44bf-8c56-88f30962734c"); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - when(redissonClient.getFairLock(anyString())).thenReturn(Mockito.mock(RLock.class)); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - 
doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_EcloudDatasetAlreadyExistsInEcloud() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - when(datasetDao.checkAndCreateDatasetInEcloud(any(Dataset.class))) - .thenReturn(UUID.randomUUID().toString()); - RLock rlock = mock(RLock.class); - when(redissonClient.getFairLock(anyString())).thenReturn(rlock); - doNothing().when(rlock).lock(); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - doNothing().when(rlock).unlock(); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_EcloudDatasetCreationFails() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = 
TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(workflow.getDatasetId())).thenReturn(workflow); - when(datasetDao.checkAndCreateDatasetInEcloud(any(Dataset.class))) - .thenReturn(UUID.randomUUID().toString()); - when(redissonClient.getFairLock(anyString())).thenReturn(Mockito.mock(RLock.class)); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())).thenReturn(null); - ObjectId objectId = new ObjectId(); - WorkflowExecution workflowExecutionTest = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecutionTest.setId(objectId); - when(workflowExecutionDao.create(any(WorkflowExecution.class))).thenReturn(workflowExecutionTest); - doNothing().when(workflowExecutorManager).addWorkflowExecutionToQueue(objectId.toString(), 0); - orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_NoDatasetFoundException() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, datasetId)) - .thenThrow(NoDatasetFoundException.class); - assertThrows(NoDatasetFoundException.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, datasetId, null, null, 0)); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_NoDatasetFoundException_Unauthorized() { - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(null); - assertThrows(NoDatasetFoundException.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutionsWithoutAuthorization(datasetId, null, null, 0)); - } - - 
@Test - void addWorkflowInQueueOfWorkflowExecutions_NoWorkflowFoundException() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(dataset.getDatasetId())).thenReturn(null); - assertThrows(NoWorkflowFoundException.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0)); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_WorkflowIsEmpty() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = new Workflow(); - workflow.setDatasetId(dataset.getDatasetId()); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - when(workflowDao.getWorkflow(dataset.getDatasetId())).thenReturn(workflow); - when(validationUtils.validateWorkflowPlugins(workflow, null)) - .thenThrow(new BadContentException("")); - assertThrows(BadContentException.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0)); - } - - @Test - void addWorkflowInQueueOfWorkflowExecutions_WorkflowExecutionAlreadyExistsException() - throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(authorizer.authorizeWriteExistingDatasetById(metisUserView, dataset.getDatasetId())) - .thenReturn(dataset); - 
when(workflowDao.getWorkflow(dataset.getDatasetId())).thenReturn(workflow); - when(redissonClient.getFairLock(anyString())).thenReturn(Mockito.mock(RLock.class)); - when(workflowExecutionDao.existsAndNotCompleted(dataset.getDatasetId())) - .thenReturn(new ObjectId().toString()); - assertThrows(WorkflowExecutionAlreadyExistsException.class, () -> orchestratorService - .addWorkflowInQueueOfWorkflowExecutions(metisUserView, dataset.getDatasetId(), null, null, 0)); - } - - @Test - void cancelWorkflowExecution() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(workflowExecution); - doNothing().when(workflowExecutionDao).setCancellingState(workflowExecution, null); - orchestratorService.cancelWorkflowExecution(metisUserView, TestObjectFactory.EXECUTIONID); - verify(authorizer, times(1)) - .authorizeWriteExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - } - - @Test - void cancelWorkflowExecution_NoWorkflowExecutionFoundException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> orchestratorService - .cancelWorkflowExecution(metisUserView, TestObjectFactory.EXECUTIONID)); - verifyNoMoreInteractions(workflowExecutorManager); - } - - @Test - void getWorkflowExecutionsPerRequest() { - orchestratorService.getWorkflowExecutionsPerRequest(); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - } - - @Test - void getLatestSuccessfulFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution_ProcessPlugin() - throws Exception { - final MetisUserView metisUserView = 
TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - final AbstractExecutablePlugin oaipmhHarvestPlugin = ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId)).thenReturn(null); - doReturn(new PluginWithExecutionId<>("execution ID", oaipmhHarvestPlugin)) - .when(dataEvolutionUtils) - .computePredecessorPlugin(ExecutablePluginType.VALIDATION_EXTERNAL, null, datasetId); - assertSame(oaipmhHarvestPlugin, orchestratorService - .getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution(metisUserView, datasetId, - ExecutablePluginType.VALIDATION_EXTERNAL, null)); - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - } - - @Test - void getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution_PluginExecutionNotAllowed() - throws NoDatasetFoundException, UserUnauthorizedException, PluginExecutionNotAllowed { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - when(authorizer.authorizeReadExistingDatasetById(metisUserView, datasetId)).thenReturn(null); - when(dataEvolutionUtils.computePredecessorPlugin(ExecutablePluginType.VALIDATION_EXTERNAL, null, - datasetId)).thenThrow(new PluginExecutionNotAllowed("")); - assertThrows(PluginExecutionNotAllowed.class, () -> orchestratorService - .getLatestFinishedPluginByDatasetIdIfPluginTypeAllowedForExecution(metisUserView, - datasetId, ExecutablePluginType.VALIDATION_EXTERNAL, null)); - } - - @Test - void getAllWorkflowExecutionsByDatasetId() throws GenericMetisException { - - // Define some constants - final int nextPage = 1; - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String 
datasetId = Integer.toString(TestObjectFactory.DATASETID); - final Set workflowStatuses = Collections.singleton(WorkflowStatus.INQUEUE); - - // Check with specific dataset ID: should query only that dataset. - doReturn(new ResultList<>(Collections.emptyList(), false)).when(workflowExecutionDao) - .getAllWorkflowExecutions(any(), any(), any(), anyBoolean(), anyInt(), anyInt(), anyBoolean()); - orchestratorService.getAllWorkflowExecutions(metisUserView, datasetId, workflowStatuses, - DaoFieldNames.ID, false, nextPage); - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)).getAllWorkflowExecutions( - Collections.singleton(datasetId), workflowStatuses, DaoFieldNames.ID, false, - nextPage, 1, false); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - verifyNoMoreInteractions(workflowExecutionDao); - } - - @Test - void getAllWorkflowExecutionsForRegularUser() throws GenericMetisException { - - // Define some constants - final int nextPage = 1; - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final Set datasetIds = new HashSet<>(Arrays.asList("A", "B", "C")); - final List datasets = datasetIds.stream().map(id -> { - final Dataset result = new Dataset(); - result.setDatasetId(id); - return result; - }).toList(); - final Set workflowStatuses = Collections.singleton(WorkflowStatus.INQUEUE); - - // Check for all datasets and for regular user: should query all datasets to which that user's - // organization has rights. 
- when(datasetDao.getAllDatasetsByOrganizationId(metisUserView.getOrganizationId())) - .thenReturn(datasets); - doReturn(new ResultList<>(Collections.emptyList(), false)).when(workflowExecutionDao) - .getAllWorkflowExecutions(any(), any(), any(), anyBoolean(), anyInt(), anyInt(), anyBoolean()); - orchestratorService.getAllWorkflowExecutions(metisUserView, null, workflowStatuses, - DaoFieldNames.CREATED_DATE, false, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)).getAllWorkflowExecutions(datasetIds, - workflowStatuses, DaoFieldNames.CREATED_DATE, false, nextPage, 1, false); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - verifyNoMoreInteractions(workflowExecutionDao); - } - - @Test - void getAllWorkflowExecutionsForAdmin() throws GenericMetisException { - - // Define some constants - final int nextPage = 1; - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final Set workflowStatuses = Collections.singleton(WorkflowStatus.INQUEUE); - - // Check for all datasets and for admin user: should query all datasets. 
- doReturn(AccountRole.METIS_ADMIN).when(metisUserView).getAccountRole(); - doReturn(new ResultList<>(Collections.emptyList(), false)).when(workflowExecutionDao) - .getAllWorkflowExecutions(any(), any(), any(), anyBoolean(), anyInt(), anyInt(), anyBoolean()); - orchestratorService.getAllWorkflowExecutions(metisUserView, null, workflowStatuses, - DaoFieldNames.CREATED_DATE, true, nextPage); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)).getAllWorkflowExecutions(isNull(), eq(workflowStatuses), - eq(DaoFieldNames.CREATED_DATE), eq(true), eq(nextPage), eq(1), eq(false)); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - verifyNoMoreInteractions(workflowExecutionDao); - } - - @Test - void getWorkflowExecutionOverviewForRegularUser() throws GenericMetisException { - - // Define some constants - final int nextPage = 1; - final int pageCount = 2; - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final Set datasetIds = new HashSet<>(Arrays.asList("A", "B", "C")); - final List datasets = datasetIds.stream().map(id -> { - final Dataset result = new Dataset(); - result.setDatasetId(id); - return result; - }).toList(); - final List data = TestObjectFactory.createExecutionsWithDatasets(4); - - // Check for all datasets and for regular user: should query all datasets to which that user's - // organization has rights. 
- when(datasetDao.getAllDatasetsByOrganizationId(metisUserView.getOrganizationId())) - .thenReturn(datasets); - when(workflowExecutionDao - .getWorkflowExecutionsOverview(eq(datasetIds), isNull(), isNull(), isNull(), isNull(), - eq(nextPage), eq(pageCount))) - .thenReturn(new ResultList<>(data, false)); - final List result = orchestratorService - .getWorkflowExecutionsOverview(metisUserView, null, null, null, null, nextPage, pageCount) - .getResults(); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)) - .getWorkflowExecutionsOverview(eq(datasetIds), isNull(), isNull(), isNull(), isNull(), - eq(nextPage), eq(pageCount)); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - verifyNoMoreInteractions(workflowExecutionDao); - assertEquals(data.size(), result.size()); - assertEquals(data.stream().map(ExecutionDatasetPair::getDataset).map(Dataset::getDatasetId) - .collect(Collectors.toList()), - result.stream().map(ExecutionAndDatasetView::getDataset) - .map(DatasetSummaryView::getDatasetId) - .collect(Collectors.toList())); - assertEquals(data.stream().map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).collect(Collectors.toList()), - result.stream().map(ExecutionAndDatasetView::getExecution) - .map(ExecutionSummaryView::getId).collect(Collectors.toList())); - } - - @Test - void getWorkflowExecutionOverviewForAdmin() throws GenericMetisException { - - // Define some constants - final int nextPage = 1; - final int pageCount = 2; - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final List data = TestObjectFactory.createExecutionsWithDatasets(4); - - // Check for all datasets and for admin user: should query all datasets. 
- doReturn(AccountRole.METIS_ADMIN).when(metisUserView).getAccountRole(); - when(workflowExecutionDao - .getWorkflowExecutionsOverview(isNull(), isNull(), isNull(), isNull(), isNull(), - eq(nextPage), eq(pageCount))) - .thenReturn(new ResultList<>(data, false)); - final List result = orchestratorService - .getWorkflowExecutionsOverview(metisUserView, null, null, null, null, nextPage, pageCount) - .getResults(); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)) - .getWorkflowExecutionsOverview(isNull(), isNull(), isNull(), isNull(), isNull(), - eq(nextPage), eq(pageCount)); - verify(workflowExecutionDao, times(1)).getWorkflowExecutionsPerRequest(); - verifyNoMoreInteractions(workflowExecutionDao); - assertEquals(data.size(), result.size()); - assertEquals(data.stream().map(ExecutionDatasetPair::getDataset).map(Dataset::getDatasetId) - .collect(Collectors.toList()), - result.stream().map(ExecutionAndDatasetView::getDataset) - .map(DatasetSummaryView::getDatasetId) - .collect(Collectors.toList())); - assertEquals(data.stream().map(ExecutionDatasetPair::getExecution).map(WorkflowExecution::getId) - .map(ObjectId::toString).collect(Collectors.toList()), - result.stream().map(ExecutionAndDatasetView::getExecution) - .map(ExecutionSummaryView::getId).collect(Collectors.toList())); - } - - @Test - void getDatasetExecutionInformation() throws GenericMetisException { - ExecutionProgress executionProgress = getExecutionProgress(100, 20); - final Date longEnoughToBeValidDate = DateUtils.modifyDateByTimeUnitAmount(new Date(), -(SOLR_COMMIT_PERIOD_IN_MINS + 3), - TimeUnit.MINUTES); - final Date notLongEnoughToBeValidDate = DateUtils.modifyDateByTimeUnitAmount(new Date(), -(SOLR_COMMIT_PERIOD_IN_MINS + 2), - TimeUnit.MINUTES); - - - // Create preview plugin - AbstractExecutablePlugin previewPlugin = ExecutablePluginFactory - .createPlugin(new IndexToPreviewPluginMetadata()); - 
previewPlugin.setFinishedDate(longEnoughToBeValidDate); - previewPlugin.setDataStatus(null); // Is default status, means valid. - previewPlugin.setExecutionProgress(executionProgress); - - // Create second publish plugin - AbstractExecutablePlugin lastPublishPlugin = ExecutablePluginFactory - .createPlugin(new IndexToPublishPluginMetadata()); - lastPublishPlugin.setFinishedDate(notLongEnoughToBeValidDate); - lastPublishPlugin.setDataStatus(null); // Is default status, means valid. - lastPublishPlugin.setExecutionProgress(executionProgress); - - boolean enableRunningPublish = true; - getDatasetExecutionInformation(previewPlugin, lastPublishPlugin, enableRunningPublish, true, false); - previewPlugin.getExecutionProgress().setTotalDatabaseRecords(100); - lastPublishPlugin.getExecutionProgress().setTotalDatabaseRecords(100); - lastPublishPlugin.setFinishedDate(longEnoughToBeValidDate); - enableRunningPublish = false; - getDatasetExecutionInformation(previewPlugin, lastPublishPlugin, enableRunningPublish, true, true); - previewPlugin.getExecutionProgress().setTotalDatabaseRecords(0); - lastPublishPlugin.getExecutionProgress().setTotalDatabaseRecords(0); - getDatasetExecutionInformation(previewPlugin, lastPublishPlugin, enableRunningPublish, false, false); - } - - private void getDatasetExecutionInformation( - AbstractExecutablePlugin previewPlugin, - AbstractExecutablePlugin lastPublishPlugin, boolean enableRunningPublish, - boolean previewReadyForViewing, - boolean publishReadyForViewing) throws GenericMetisException { - ExecutionProgress executionProgress = getExecutionProgress(100, 20); - - // Create harvest plugin. - AbstractExecutablePlugin oaipmhHarvestPlugin = ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - oaipmhHarvestPlugin.setFinishedDate( - DateUtils.modifyDateByTimeUnitAmount(new Date(), -(SOLR_COMMIT_PERIOD_IN_MINS + 5), - TimeUnit.MINUTES)); - oaipmhHarvestPlugin.setDataStatus(null); // Is default status, means valid. 
- oaipmhHarvestPlugin.setExecutionProgress(executionProgress); - - // Create first publish plugin - AbstractExecutablePlugin firstPublishPlugin = ExecutablePluginFactory - .createPlugin(new IndexToPublishPluginMetadata()); - firstPublishPlugin.setFinishedDate( - DateUtils.modifyDateByTimeUnitAmount(new Date(), -(SOLR_COMMIT_PERIOD_IN_MINS + 4), - TimeUnit.MINUTES)); - firstPublishPlugin.setDataStatus(null); // Is default status, means valid. - firstPublishPlugin.setExecutionProgress(executionProgress); - final WorkflowExecution executionWithFirstPublishPlugin = TestObjectFactory - .createWorkflowExecutionObject(); - final List metisPluginsFirstPublish = executionWithFirstPublishPlugin - .getMetisPlugins(); - metisPluginsFirstPublish.add(firstPublishPlugin); - executionWithFirstPublishPlugin.setMetisPlugins(metisPluginsFirstPublish); - - final WorkflowExecution executionWithLastPublishPlugin = TestObjectFactory - .createWorkflowExecutionObject(); - final List metisPluginsLastPublish = executionWithLastPublishPlugin - .getMetisPlugins(); - metisPluginsLastPublish.add(lastPublishPlugin); - executionWithLastPublishPlugin.setMetisPlugins(metisPluginsLastPublish); - - // Create reindex to preview plugin - AbstractMetisPlugin reindexToPreviewPlugin = new ReindexToPreviewPlugin( - new ReindexToPreviewPluginMetadata()); - reindexToPreviewPlugin.setFinishedDate( - DateUtils.modifyDateByTimeUnitAmount(new Date(), -(SOLR_COMMIT_PERIOD_IN_MINS + 1), - TimeUnit.MINUTES)); - final WorkflowExecution executionWithReindexToPreview = TestObjectFactory - .createWorkflowExecutionObject(); - executionWithReindexToPreview.setMetisPlugins(List.of(reindexToPreviewPlugin)); - - // Create execution in progress with a publish plugin - final WorkflowExecution workflowExecutionObject = TestObjectFactory - .createWorkflowExecutionObject(); - workflowExecutionObject.setWorkflowStatus(WorkflowStatus.RUNNING); - final List metisPlugins = workflowExecutionObject.getMetisPlugins(); - final 
AbstractExecutablePlugin cleaningPublishPlugin = ExecutablePluginFactory - .createPlugin(new IndexToPublishPluginMetadata()); - cleaningPublishPlugin.setPluginStatus(PluginStatus.CLEANING); - metisPlugins.add(cleaningPublishPlugin); - workflowExecutionObject.setMetisPlugins(metisPlugins); - - // Mock the workflow execution - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.HTTP_HARVEST, ExecutablePluginType.OAIPMH_HARVEST), false)) - .thenReturn(new PluginWithExecutionId<>("", oaipmhHarvestPlugin)); - when(workflowExecutionDao.getFirstSuccessfulPlugin(datasetId, - EnumSet.of(PluginType.PUBLISH, PluginType.REINDEX_TO_PUBLISH))) - .thenReturn(new PluginWithExecutionId<>( - executionWithFirstPublishPlugin.getId().toString(), firstPublishPlugin)); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.PREVIEW), false)) - .thenReturn(new PluginWithExecutionId<>("", previewPlugin)); - when(workflowExecutionDao.getLatestSuccessfulPlugin(datasetId, - EnumSet.of(PluginType.PREVIEW, PluginType.REINDEX_TO_PREVIEW))) - .thenReturn( - new PluginWithExecutionId<>(executionWithReindexToPreview.getId().toString(), - reindexToPreviewPlugin)); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(datasetId, - EnumSet.of(ExecutablePluginType.PUBLISH), false)) - .thenReturn(new PluginWithExecutionId<>("", lastPublishPlugin)); - when(workflowExecutionDao.getLatestSuccessfulPlugin(datasetId, - EnumSet.of(PluginType.PUBLISH, PluginType.REINDEX_TO_PUBLISH))) - .thenReturn(new PluginWithExecutionId<>( - executionWithLastPublishPlugin.getId().toString(), lastPublishPlugin)); - if (enableRunningPublish) { - when(workflowExecutionDao.getRunningOrInQueueExecution(datasetId)) - 
.thenReturn(workflowExecutionObject); - } else { - when(workflowExecutionDao.getRunningOrInQueueExecution(datasetId)).thenReturn(null); - } - - DatasetExecutionInformation executionInfo = orchestratorService - .getDatasetExecutionInformation(metisUserView, datasetId); - - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - - assertEquals(oaipmhHarvestPlugin.getFinishedDate(), executionInfo.getLastHarvestedDate()); - assertEquals(reindexToPreviewPlugin.getFinishedDate(), executionInfo.getLastPreviewDate()); - assertEquals(firstPublishPlugin.getFinishedDate(), executionInfo.getFirstPublishedDate()); - assertEquals(lastPublishPlugin.getFinishedDate(), executionInfo.getLastPublishedDate()); - - assertEquals( - oaipmhHarvestPlugin.getExecutionProgress().getProcessedRecords() - oaipmhHarvestPlugin - .getExecutionProgress().getErrors(), executionInfo.getLastHarvestedRecords()); - assertEquals( - previewPlugin.getExecutionProgress().getProcessedRecords() - previewPlugin - .getExecutionProgress().getErrors(), executionInfo.getLastPreviewRecords()); - assertEquals( - lastPublishPlugin.getExecutionProgress().getProcessedRecords() - lastPublishPlugin - .getExecutionProgress().getErrors(), executionInfo.getLastPublishedRecords()); - - assertEquals(previewReadyForViewing, executionInfo.isLastPreviewRecordsReadyForViewing()); - assertEquals(publishReadyForViewing, executionInfo.isLastPublishedRecordsReadyForViewing()); - } - - @NotNull - private ExecutionProgress getExecutionProgress(int processedRecords, int errors) { - // Create execution progress object - ExecutionProgress executionProgress = new ExecutionProgress(); - executionProgress.setProcessedRecords(processedRecords); - executionProgress.setErrors(errors); - return executionProgress; - } - - @Test - void testGetDatasetExecutionHistory() throws GenericMetisException { - - // Create plugins - final AbstractExecutablePlugin plugin1 = 
mock(AbstractExecutablePlugin.class); - when(plugin1.getPluginType()).thenReturn(PluginType.OAIPMH_HARVEST); - when(plugin1.getPluginMetadata()).thenReturn(new HTTPHarvestPluginMetadata()); - final ExecutionProgress progress1 = getExecutionProgress(10, 1); - when(plugin1.getExecutionProgress()).thenReturn(progress1); - final AbstractExecutablePlugin plugin2 = mock(AbstractExecutablePlugin.class); - final ExecutionProgress progress2 = new ExecutionProgress(); - when(plugin2.getPluginType()).thenReturn(PluginType.TRANSFORMATION); - when(plugin2.getPluginMetadata()).thenReturn(new TransformationPluginMetadata()); - progress2.setProcessedRecords(10); - progress2.setErrors(10); - when(plugin2.getExecutionProgress()).thenReturn(progress2); - final AbstractExecutablePlugin plugin3 = mock(AbstractExecutablePlugin.class); - when(plugin3.getPluginType()).thenReturn(PluginType.MEDIA_PROCESS); - when(plugin2.getPluginMetadata()).thenReturn(new MediaProcessPluginMetadata()); - when(plugin3.getExecutionProgress()).thenReturn(null); - final ReindexToPreviewPlugin plugin4 = mock(ReindexToPreviewPlugin.class); - when(plugin4.getPluginType()).thenReturn(PluginType.REINDEX_TO_PUBLISH); - when(plugin4.getFinishedDate()).thenReturn(new Date(4)); - - // Create other objects - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = "dataset ID"; - final WorkflowExecution execution1 = createWorkflowExecution(metisUserView, datasetId, plugin1, - plugin2); - execution1.setStartedDate(new Date(12345)); - final WorkflowExecution execution2 = createWorkflowExecution(metisUserView, datasetId, plugin3); - final WorkflowExecution execution3 = createWorkflowExecution(metisUserView, datasetId, plugin4); - - // Mock the dao and call the method. 
- doReturn(new ResultList<>(List.of(execution1, execution2, execution3), false)) - .when(workflowExecutionDao).getAllWorkflowExecutions(any(), any(), any(), anyBoolean(), - anyInt(), any(), anyBoolean()); - final ExecutionHistory result = orchestratorService.getDatasetExecutionHistory(metisUserView, datasetId); - - // Verify the interactions - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - verify(workflowExecutionDao, times(1)).getAllWorkflowExecutions( - eq(Collections.singleton(datasetId)), isNull(), eq(DaoFieldNames.STARTED_DATE), eq(false), - eq(0), isNull(), eq(false)); - verifyNoMoreInteractions(workflowExecutionDao); - - // Verify the result - assertEquals(1, result.getExecutions().size()); - assertEquals(execution1.getId().toString(), - result.getExecutions().get(0).getWorkflowExecutionId()); - assertEquals(execution1.getStartedDate(), result.getExecutions().get(0).getStartedDate()); - } - - @Test - void testGetExecutablePluginsWithDataAvailability() throws GenericMetisException { - - // Create plugins - final AbstractExecutablePlugin plugin1 = mock(AbstractExecutablePlugin.class); - when(plugin1.getPluginType()).thenReturn(PluginType.OAIPMH_HARVEST); - when(plugin1.getPluginMetadata()).thenReturn(new HTTPHarvestPluginMetadata()); - final ExecutionProgress progress1 = getExecutionProgress(10, 1); - when(plugin1.getExecutionProgress()).thenReturn(progress1); - final AbstractExecutablePlugin plugin2 = mock(AbstractExecutablePlugin.class); - final ExecutionProgress progress2 = new ExecutionProgress(); - when(plugin2.getPluginType()).thenReturn(PluginType.TRANSFORMATION); - when(plugin2.getPluginMetadata()).thenReturn(new TransformationPluginMetadata()); - progress2.setProcessedRecords(10); - progress2.setErrors(10); - when(plugin2.getExecutionProgress()).thenReturn(progress2); - final AbstractExecutablePlugin plugin3 = mock(AbstractExecutablePlugin.class); - 
when(plugin3.getPluginType()).thenReturn(PluginType.MEDIA_PROCESS); - when(plugin2.getPluginMetadata()).thenReturn(new MediaProcessPluginMetadata()); - when(plugin3.getExecutionProgress()).thenReturn(null); - final ReindexToPreviewPlugin plugin4 = mock(ReindexToPreviewPlugin.class); - when(plugin4.getPluginType()).thenReturn(PluginType.REINDEX_TO_PUBLISH); - when(plugin4.getFinishedDate()).thenReturn(new Date(4)); - - // Create other objects - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = "dataset ID"; - final WorkflowExecution execution = createWorkflowExecution(metisUserView, datasetId, plugin1, - plugin2, plugin3, plugin4); - final String workflowExecutionId = execution.getId().toString(); - - // Test happy flow - final PluginsWithDataAvailability result = orchestratorService - .getExecutablePluginsWithDataAvailability(metisUserView, workflowExecutionId); - assertNotNull(result); - assertNotNull(result.getPlugins()); - assertEquals(1, result.getPlugins().size()); - assertEquals(plugin1.getPluginType(), result.getPlugins().get(0).getPluginType()); - assertTrue(result.getPlugins().get(0).isCanDisplayRawXml()); - - // Test when the workflow execution does not exist - doReturn(null).when(orchestratorService) - .getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId); - assertThrows(NoWorkflowExecutionFoundException.class, () -> orchestratorService - .getExecutablePluginsWithDataAvailability(metisUserView, workflowExecutionId)); - - // Test when the user is not allowed - when(orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)) - .thenAnswer(invocation -> { - throw new UserUnauthorizedException(""); - }); - assertThrows(UserUnauthorizedException.class, () -> orchestratorService - .getExecutablePluginsWithDataAvailability(metisUserView, workflowExecutionId)); - } - - @Test - void testGetRecordEvolutionForVersionExceptions() throws 
GenericMetisException { - - // Create some objects - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String workflowExecutionId = "workflow execution ID"; - final PluginType pluginType = PluginType.MEDIA_PROCESS; - final WorkflowExecution workflowExecution = mock(WorkflowExecution.class); - - // Test when the workflow execution does not exist - when(orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)) - .thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> orchestratorService - .getRecordEvolutionForVersion(metisUserView, workflowExecutionId, pluginType)); - - // Test when the user is not allowed - when(orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId)) - .thenAnswer(invocation -> { - throw new UserUnauthorizedException(""); - }); - assertThrows(UserUnauthorizedException.class, () -> orchestratorService - .getRecordEvolutionForVersion(metisUserView, workflowExecutionId, pluginType)); - - // Test when the workflow execution does not have a plugin of the right type - doReturn(workflowExecution).when(orchestratorService) - .getWorkflowExecutionByExecutionId(metisUserView, workflowExecutionId); - when(workflowExecution.getMetisPluginWithType(pluginType)).thenReturn(Optional.empty()); - assertThrows(NoWorkflowExecutionFoundException.class, () -> orchestratorService - .getRecordEvolutionForVersion(metisUserView, workflowExecutionId, pluginType)); - } - - @Test - void testGetRecordEvolutionForVersionHappyFlow() throws GenericMetisException { - - // Create two workflow executions with three plugins and link them together - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = "dataset ID"; - final AbstractExecutablePlugin plugin1 = createMetisPlugin(ExecutablePluginType.OAIPMH_HARVEST, - new Date(1)); - final AbstractExecutablePlugin plugin2 = 
createMetisPlugin(ExecutablePluginType.TRANSFORMATION, - new Date(2)); - final AbstractExecutablePlugin plugin3 = createMetisPlugin(ExecutablePluginType.MEDIA_PROCESS, - new Date(3)); - final WorkflowExecution execution1 = createWorkflowExecution(metisUserView, datasetId, plugin1); - final WorkflowExecution execution2 = createWorkflowExecution(metisUserView, datasetId, plugin2, - plugin3); - - // Mock the methods in workflow utils. - final List> evolutionWithContent = Arrays - .asList( - ImmutablePair.of(plugin1, execution1), ImmutablePair.of(plugin2, execution2)); - doReturn(evolutionWithContent).when(dataEvolutionUtils).compileVersionEvolution(plugin3, execution2); - doReturn(new ArrayList<>()).when(dataEvolutionUtils).compileVersionEvolution(plugin1, execution1); - - // Execute the call and expect an evolution with content. - final VersionEvolution resultForThree = orchestratorService.getRecordEvolutionForVersion( - metisUserView, execution2.getId().toString(), plugin3.getPluginType()); - assertNotNull(resultForThree); - assertNotNull(resultForThree.getEvolutionSteps()); - assertEquals(2, resultForThree.getEvolutionSteps().size()); - assertEvolutionStepEquals(resultForThree.getEvolutionSteps().get(0), execution1, plugin1); - assertEvolutionStepEquals(resultForThree.getEvolutionSteps().get(1), execution2, plugin2); - - // Execute the call and expect an evolution without content. 
- final VersionEvolution resultForOne = orchestratorService.getRecordEvolutionForVersion( - metisUserView, execution1.getId().toString(), plugin1.getPluginType()); - assertNotNull(resultForOne); - assertNotNull(resultForOne.getEvolutionSteps()); - assertTrue(resultForOne.getEvolutionSteps().isEmpty()); - } - - private void assertEvolutionStepEquals(VersionEvolutionStep evolutionStep, - WorkflowExecution execution, AbstractExecutablePlugin plugin) { - assertNotNull(evolutionStep); - assertEquals(plugin.getFinishedDate(), evolutionStep.getFinishedTime()); - assertEquals(plugin.getPluginMetadata().getExecutablePluginType(), - evolutionStep.getPluginType()); - assertEquals(execution.getId().toString(), evolutionStep.getWorkflowExecutionId()); - } - - private WorkflowExecution createWorkflowExecution(MetisUserView metisUserView, String datasetId, - AbstractMetisPlugin... plugins) throws GenericMetisException { - final WorkflowExecution result = new WorkflowExecution(); - result.setId(new ObjectId()); - result.setDatasetId(datasetId); - result.setMetisPlugins(Arrays.asList(plugins)); - when(orchestratorService.getWorkflowExecutionByExecutionId(metisUserView, - result.getId().toString())).thenReturn(result); - return result; - } - - private AbstractExecutablePlugin createMetisPlugin(ExecutablePluginType type, Date date) { - AbstractExecutablePlugin result = mock( - AbstractExecutablePlugin.class); - AbstractExecutablePluginMetadata metadata = mock(AbstractExecutablePluginMetadata.class); - when(metadata.getExecutablePluginType()).thenReturn(type); - when(result.getPluginType()).thenReturn(type.toPluginType()); - when(result.getPluginMetadata()).thenReturn(metadata); - when(result.getFinishedDate()).thenReturn(date); - return result; - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java deleted file mode 
100644 index 52155a72d7..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestProxiesService.java +++ /dev/null @@ -1,667 +0,0 @@ -package eu.europeana.metis.core.service; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import eu.europeana.cloud.client.dps.rest.DpsClient; -import eu.europeana.cloud.client.uis.rest.UISClient; -import eu.europeana.cloud.common.model.File; -import eu.europeana.cloud.common.model.Representation; -import eu.europeana.cloud.common.model.Revision; -import eu.europeana.cloud.common.model.dps.NodeReport; -import eu.europeana.cloud.common.model.dps.StatisticsReport; -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import eu.europeana.cloud.common.response.CloudTagsResponse; -import eu.europeana.cloud.mcs.driver.DataSetServiceClient; -import eu.europeana.cloud.mcs.driver.FileServiceClient; -import eu.europeana.cloud.mcs.driver.RecordServiceClient; -import eu.europeana.cloud.service.dps.exception.DpsException; 
-import eu.europeana.cloud.service.mcs.exception.MCSException; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.exceptions.NoWorkflowExecutionFoundException; -import eu.europeana.metis.core.rest.ListOfIds; -import eu.europeana.metis.core.rest.PaginatedRecordsResponse; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.rest.RecordsResponse; -import eu.europeana.metis.core.rest.stats.NodePathStatistics; -import eu.europeana.metis.core.rest.stats.RecordStatistics; -import eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.MetisPlugin; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.core.workflow.plugins.Topology; -import eu.europeana.metis.exception.ExternalTaskException; -import eu.europeana.metis.exception.GenericMetisException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import 
org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-02-26 - */ -class TestProxiesService { - - private static final long EXTERNAL_TASK_ID = 2070373127078497810L; - - private static ProxiesService proxiesService; - private static WorkflowExecutionDao workflowExecutionDao; - private static DpsClient dpsClient; - private static UISClient uisClient; - private static DataSetServiceClient ecloudDataSetServiceClient; - private static RecordServiceClient recordServiceClient; - private static FileServiceClient fileServiceClient; - private static Authorizer authorizer; - private static ProxiesHelper proxiesHelper; - - @BeforeAll - static void prepare() { - workflowExecutionDao = mock(WorkflowExecutionDao.class); - ecloudDataSetServiceClient = mock(DataSetServiceClient.class); - recordServiceClient = mock(RecordServiceClient.class); - fileServiceClient = mock(FileServiceClient.class); - dpsClient = mock(DpsClient.class); - uisClient = mock(UISClient.class); - authorizer = mock(Authorizer.class); - proxiesHelper = mock(ProxiesHelper.class); - - proxiesService = spy(new ProxiesService(workflowExecutionDao, ecloudDataSetServiceClient, - recordServiceClient, fileServiceClient, dpsClient, uisClient,"ecloudProvider", authorizer, proxiesHelper)); - } - - @AfterEach - void cleanUp() { - reset(workflowExecutionDao); - reset(ecloudDataSetServiceClient); - reset(recordServiceClient); - reset(fileServiceClient); - reset(dpsClient); - reset(uisClient); - reset(authorizer); - reset(proxiesHelper); - reset(proxiesService); - } - - @Test - void getExternalTaskLogs() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - List listOfSubTaskInfo = TestObjectFactory.createListOfSubTaskInfo(); - - when(dpsClient - .getDetailedTaskReportBetweenChunks(Topology.OAIPMH_HARVEST.getTopologyName(), - 
EXTERNAL_TASK_ID, - 1, 100)).thenReturn(listOfSubTaskInfo); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - proxiesService.getExternalTaskLogs(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - EXTERNAL_TASK_ID, 1, 100); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - assertEquals(2, listOfSubTaskInfo.size()); - assertNull(listOfSubTaskInfo.get(0).getAdditionalInformations()); - assertNull(listOfSubTaskInfo.get(1).getAdditionalInformations()); - } - - @Test - void getExternalTaskLogs_NoExecutionException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getExternalTaskLogs(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - EXTERNAL_TASK_ID, 1, 100)); - } - - @Test - void getExternalTaskLogs_ExternalTaskException() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(dpsClient - .getDetailedTaskReportBetweenChunks(Topology.OAIPMH_HARVEST.getTopologyName(), - EXTERNAL_TASK_ID, 1, 100)).thenThrow(new DpsException()); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - assertThrows(ExternalTaskException.class, () -> proxiesService - .getExternalTaskLogs(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - EXTERNAL_TASK_ID, 1, 100)); - } - - @Test - void existsExternalTaskReport() throws Exception { - final MetisUserView metisUserView = 
TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - - when(dpsClient.checkIfErrorReportExists(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID)).thenReturn(true).thenThrow(DpsException.class); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - - final boolean existsExternalTaskReport = proxiesService.existsExternalTaskReport(metisUserView, - Topology.OAIPMH_HARVEST.getTopologyName(), TestObjectFactory.EXTERNAL_TASK_ID); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - assertTrue(existsExternalTaskReport); - - assertThrows(ExternalTaskException.class, () -> proxiesService.existsExternalTaskReport(metisUserView, - Topology.OAIPMH_HARVEST.getTopologyName(), TestObjectFactory.EXTERNAL_TASK_ID)); - - } - - @Test - void getExternalTaskReport() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - TaskErrorsInfo taskErrorsInfo = TestObjectFactory.createTaskErrorsInfoListWithoutIdentifiers(2); - TaskErrorsInfo taskErrorsInfoWithIdentifiers = TestObjectFactory - .createTaskErrorsInfoWithIdentifiers(taskErrorsInfo.getErrors().get(0).getErrorType(), - taskErrorsInfo.getErrors().get(0).getMessage()); - - when(dpsClient - .getTaskErrorsReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, null, 10)) - .thenReturn(taskErrorsInfoWithIdentifiers); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - - TaskErrorsInfo externalTaskReport = proxiesService.getExternalTaskReport(metisUserView, - Topology.OAIPMH_HARVEST.getTopologyName(), 
TestObjectFactory.EXTERNAL_TASK_ID, 10); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - assertEquals(1, externalTaskReport.getErrors().size()); - assertFalse(externalTaskReport.getErrors().get(0).getErrorDetails().isEmpty()); - } - - @Test - void getExternalTaskReport_NoExecutionException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getExternalTaskReport(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, 10)); - } - - @Test - void getExternalTaskReport_ExternalTaskException() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(dpsClient - .getTaskErrorsReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, null, 10)) - .thenThrow(new DpsException()); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - assertThrows(ExternalTaskException.class, () -> proxiesService - .getExternalTaskReport(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, 10)); - } - - private Pair, ExecutablePluginType> getUsedAndUnusedPluginType( - WorkflowExecution execution) { - final Set usedPluginTypes = execution.getMetisPlugins().stream() - .map(AbstractMetisPlugin::getPluginType).collect(Collectors.toSet()); - final PluginType usedPluginType = usedPluginTypes.stream().findAny() - .orElseThrow(IllegalStateException::new); - final Map executablePluginTypes = Stream - .of(ExecutablePluginType.values()) - 
.collect(Collectors.toMap(ExecutablePluginType::toPluginType, Function.identity())); - final ExecutablePluginType unusedPluginType = Stream.of(PluginType.values()) - .filter(type -> !usedPluginTypes.contains(type)) - .map(executablePluginTypes::get).filter( - Objects::nonNull).findAny().orElseThrow(IllegalStateException::new); - return new ImmutablePair<>( - (AbstractExecutablePlugin) execution.getMetisPluginWithType(usedPluginType) - .filter(plugin -> plugin instanceof AbstractExecutablePlugin) - .orElseThrow(IllegalStateException::new), unusedPluginType); - } - - @Test - void getExternalTaskStatistics_NoExecutionException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getExternalTaskStatistics(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID)); - } - - @Test - void getExternalTaskStatistics_ExternalTaskException() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(dpsClient.getTaskStatisticsReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID)).thenThrow(new DpsException()); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - assertThrows(ExternalTaskException.class, () -> proxiesService - .getExternalTaskStatistics(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID)); - } - - @Test - void getAdditionalNodeStatistics() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String nodePath = "node path"; - final List nodeReportList = new 
ArrayList<>(); - when(dpsClient.getElementReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, nodePath)).thenReturn(nodeReportList); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - final NodePathStatistics nodePathStatistics = new NodePathStatistics(); - when(proxiesHelper.compileNodePathStatistics(nodePath, nodeReportList)).thenReturn(nodePathStatistics); - final NodePathStatistics result = proxiesService - .getAdditionalNodeStatistics(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, nodePath); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - assertSame(nodePathStatistics, result); - } - - @Test - void getAdditionalNodeStatistics_NoExecutionException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getAdditionalNodeStatistics(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, "node path")); - } - - @Test - void getAdditionalNodeStatistics_ExternalTaskException() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String nodePath = "node path"; - when(dpsClient.getElementReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, nodePath)).thenThrow(new DpsException()); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - 
assertThrows(ExternalTaskException.class, () -> proxiesService - .getAdditionalNodeStatistics(metisUserView, Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID, nodePath)); - } - - @Test - void getExternalTaskStatistics() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final StatisticsReport taskStatistics = TestObjectFactory.createTaskStatisticsReport(); - when(dpsClient.getTaskStatisticsReport(Topology.OAIPMH_HARVEST.getTopologyName(), - TestObjectFactory.EXTERNAL_TASK_ID)).thenReturn(taskStatistics); - final WorkflowExecution workflowExecution = TestObjectFactory.createWorkflowExecutionObject(); - when(workflowExecutionDao.getByExternalTaskId(EXTERNAL_TASK_ID)).thenReturn(workflowExecution); - final RecordStatistics recordStatistics = new RecordStatistics(); - when(proxiesHelper.compileRecordStatistics(taskStatistics)).thenReturn(recordStatistics); - final RecordStatistics result = proxiesService.getExternalTaskStatistics(metisUserView, - Topology.OAIPMH_HARVEST.getTopologyName(), TestObjectFactory.EXTERNAL_TASK_ID); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, workflowExecution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - assertSame(recordStatistics, result); - } - - // TODO: add tests for searchRecordByIdFromPluginExecution - - @Test - void getListOfFileContentsFromPluginExecution() throws Exception { - - // Create execution and plugin - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final WorkflowExecution execution = TestObjectFactory.createWorkflowExecutionObject(); - execution.getMetisPlugins() - .forEach(abstractMetisPlugin -> abstractMetisPlugin.setStartedDate(new Date())); - final AbstractExecutablePlugin plugin = getUsedAndUnusedPluginType(execution).getLeft(); - doReturn(new ImmutablePair<>(execution, plugin)).when(proxiesService) - 
.getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType()); - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(execution); - - // Mock getting the records from eCloud. - final String ecloudId = "ECLOUDID1"; - final CloudTagsResponse cloudTagsResponse = new CloudTagsResponse(ecloudId, false); - when(ecloudDataSetServiceClient - .getRevisionsWithDeletedFlagSetToFalse(anyString(), anyString(), anyString(), anyString(), anyString(), - anyString(), anyInt())).thenReturn(Collections.singletonList(cloudTagsResponse)); - - // Mock obtaining the actual record. - final Record record = new Record(ecloudId, "test content"); - doReturn(record).when(proxiesService).getRecord(plugin, ecloudId); - - // Execute the call. - PaginatedRecordsResponse listOfFileContentsFromPluginExecution = proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), null, 5); - assertEquals(record.getXmlRecord(), - listOfFileContentsFromPluginExecution.getRecords().get(0).getXmlRecord()); - assertEquals(ecloudId, listOfFileContentsFromPluginExecution.getRecords().get(0).getEcloudId()); - - // If the actual record could not be gotten, we need to see an exception. 
- doReturn(null).when(proxiesService).getRecord(plugin, ecloudId); - assertThrows(IllegalStateException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), null, 5)); - } - - @Test - void getListOfFileContentsFromPluginExecution_ExceptionOfDataAvailability() throws GenericMetisException { - - // If there is no execution - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final ExecutablePluginType pluginType = ExecutablePluginType.OAIPMH_HARVEST; - doThrow(NoWorkflowExecutionFoundException.class).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, null, 5)); - - // If the user has no rights - doThrow(UserUnauthorizedException.class).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, pluginType); - assertThrows(UserUnauthorizedException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, null, 5)); - - // If the execution does not have the plugin an empty result should be returned. 
- doReturn(null).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, pluginType); - final PaginatedRecordsResponse result = proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, null, 5); - assertNotNull(result); - assertNotNull(result.getRecords()); - assertTrue(result.getRecords().isEmpty()); - assertNull(result.getNextPage()); - } - - @Test - void getListOfFileContentsFromPluginExecution_ExceptionRequestingRevisions() throws Exception { - - // Create execution and plugin and mock relevant method getting them. - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final WorkflowExecution execution = TestObjectFactory.createWorkflowExecutionObject(); - execution.getMetisPlugins() - .forEach(abstractMetisPlugin -> abstractMetisPlugin.setStartedDate(new Date())); - final AbstractExecutablePlugin plugin = getUsedAndUnusedPluginType(execution).getLeft(); - doReturn(new ImmutablePair<>(execution, plugin)).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType()); - - // Mock ecloud client method. - when(ecloudDataSetServiceClient - .getRevisionsWithDeletedFlagSetToFalse(anyString(), anyString(), anyString(), anyString(), - anyString(), anyString(), anyInt())) - .thenThrow(new MCSException("Chunk cannot be retrieved")); - - // Check exception. - assertThrows(ExternalTaskException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), null, 5)); - } - - @Test - void testGetListOfFileContentsFromPluginExecution() throws GenericMetisException { - - // Create execution and plugin and mock relevant method getting them. 
- final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final WorkflowExecution execution = TestObjectFactory.createWorkflowExecutionObject(); - execution.getMetisPlugins() - .forEach(abstractMetisPlugin -> abstractMetisPlugin.setStartedDate(new Date())); - final AbstractExecutablePlugin plugin = getUsedAndUnusedPluginType(execution).getLeft(); - doReturn(new ImmutablePair<>(execution, plugin)).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType()); - - // Create the test records and the list of IDs. - final Record record1 = new Record("ID 1", "test content 1"); - final Record record2 = new Record("ID 2", "test content 2"); - final Record record3 = new Record("ID 3", "test content 3"); - final List idList = Stream.of(record1, record2, record3).map(Record::getEcloudId) - .toList(); - - // Mock the method for getting records - doReturn(record1).when(proxiesService).getRecord(plugin, record1.getEcloudId()); - doReturn(record2).when(proxiesService).getRecord(plugin, record2.getEcloudId()); - doReturn(record3).when(proxiesService).getRecord(plugin, record3.getEcloudId()); - - // Make the call - happy flow - final ListOfIds input = new ListOfIds(); - input.setIds(idList); - final RecordsResponse result = proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), input); - - // Verify that the result contains the record in the right order - assertNotNull(result); - assertNotNull(result.getRecords()); - assertEquals(idList.size(), result.getRecords().size()); - assertEquals(idList, - result.getRecords().stream().map(Record::getEcloudId).collect(Collectors.toList())); - - // Check that the call also works for an empty list - input.setIds(Collections.emptyList()); - final RecordsResponse emptyResult = proxiesService - 
.getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), input); - assertNotNull(emptyResult); - assertNotNull(emptyResult.getRecords()); - assertTrue(emptyResult.getRecords().isEmpty()); - input.setIds(idList); - - // Check that if a record does not exist, the method still returns with the other records. - doReturn(null).when(proxiesService).getRecord(plugin, record3.getEcloudId()); - final RecordsResponse resultWithMissingRecord = proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), input); - assertNotNull(resultWithMissingRecord); - assertNotNull(resultWithMissingRecord.getRecords()); - assertEquals(idList.size() - 1, resultWithMissingRecord.getRecords().size()); - assertTrue(new HashSet<>(idList).containsAll(resultWithMissingRecord.getRecords().stream() - .map(Record::getEcloudId).collect(Collectors.toList()))); - - // Check that if a record cannot be retrieved, the method fails. 
- doThrow(ExternalTaskException.class).when(proxiesService) - .getRecord(plugin, record3.getEcloudId()); - assertThrows(ExternalTaskException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType(), input)); - } - - @Test - void testGetListOfFileContentsFromPluginExecution_ExceptionOfDataAvailability() throws GenericMetisException { - - // If there is no execution - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final ExecutablePluginType pluginType = ExecutablePluginType.OAIPMH_HARVEST; - doThrow(NoWorkflowExecutionFoundException.class).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, new ListOfIds())); - - // If the user has no rights - doThrow(UserUnauthorizedException.class).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, pluginType); - assertThrows(UserUnauthorizedException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, new ListOfIds())); - - // If the execution does not have the plugin an empty result should be returned. 
- doReturn(null).when(proxiesService) - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, pluginType); - assertThrows(NoWorkflowExecutionFoundException.class, () -> proxiesService - .getListOfFileContentsFromPluginExecution(metisUserView, TestObjectFactory.EXECUTIONID, - pluginType, new ListOfIds())); - } - - @Test - void testGetExecutionAndPlugin() throws GenericMetisException { - - // Create a workflowExecution and get the plugin types - final WorkflowExecution execution = TestObjectFactory.createWorkflowExecutionObject(); - final Pair, ExecutablePluginType> pluginAndUnusedType = getUsedAndUnusedPluginType( - execution); - final ExecutablePluginType unusedPluginType = pluginAndUnusedType.getRight(); - final AbstractExecutablePlugin plugin = pluginAndUnusedType.getLeft(); - - // Create a user and mock the dependency methods. - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(execution); - doReturn(null).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, execution.getDatasetId()); - - // Test happy flow with result - final Pair result = proxiesService - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType()); - assertNotNull(result); - assertEquals(execution, result.getLeft()); - assertNotNull(result.getRight()); - assertSame(plugin, result.getRight()); - verify(authorizer, times(1)) - .authorizeReadExistingDatasetById(metisUserView, execution.getDatasetId()); - verifyNoMoreInteractions(authorizer); - - // Test happy flow without result - assertNull(proxiesService - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, unusedPluginType)); - - // Test execution not found - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(null); - assertThrows(NoWorkflowExecutionFoundException.class, () -> 
proxiesService - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType())); - when(workflowExecutionDao.getById(TestObjectFactory.EXECUTIONID)).thenReturn(execution); - - // Test unauthorized exception - when(authorizer.authorizeReadExistingDatasetById(metisUserView, execution.getDatasetId())) - .thenThrow(UserUnauthorizedException.class); - assertThrows(UserUnauthorizedException.class, () -> proxiesService - .getExecutionAndPlugin(metisUserView, TestObjectFactory.EXECUTIONID, - plugin.getPluginMetadata().getExecutablePluginType())); - doReturn(null).when(authorizer) - .authorizeReadExistingDatasetById(metisUserView, execution.getDatasetId()); - } - - @Test - void testGetRecord() throws MCSException, ExternalTaskException { - - // Create representation - final Representation representation = mock(Representation.class); - final String contentUri = "https://example.com"; - final File file = new File(); - file.setContentUri(URI.create(contentUri)); - when(representation.getFiles()).thenReturn(Collections.singletonList(file)); - - // Create plugin - final PluginType pluginType = PluginType.MEDIA_PROCESS; - final ExecutablePlugin plugin = mock(ExecutablePlugin.class); - when(plugin.getPluginType()).thenReturn(pluginType); - when(plugin.getStartedDate()).thenReturn(new Date()); - - // Configure mocks - final String ecloudId = "ecloud ID"; - final String ecloudProvider = proxiesService.getEcloudProvider(); - final Date startedDate = plugin.getStartedDate(); - final Revision revision = new Revision(pluginType.name(), ecloudProvider, startedDate); - doReturn(Collections.singletonList(representation)).when(recordServiceClient) - .getRepresentationsByRevision(ecloudId, - MetisPlugin.getRepresentationName(), revision); - final String testContent = "test content"; - when(fileServiceClient.getFile(contentUri)) - .thenReturn(new ByteArrayInputStream(testContent.getBytes(StandardCharsets.UTF_8))); - - // Test 
happy flow - final Record result = proxiesService.getRecord(plugin, ecloudId); - assertNotNull(result); - assertEquals(ecloudId, result.getEcloudId()); - assertEquals(testContent, result.getXmlRecord()); - - // When the file service client returns an invalid input stream - when(fileServiceClient.getFile(contentUri)).thenReturn(new InputStream() { - @Override - public int read() throws IOException { - throw new IOException("Test generated exception"); - } - }); - assertThrows(ExternalTaskException.class, () -> proxiesService.getRecord(plugin, ecloudId)); - - // When the file service client throws exception - when(fileServiceClient.getFile(contentUri)).thenThrow(new MCSException()); - assertThrows(ExternalTaskException.class, () -> proxiesService.getRecord(plugin, ecloudId)); - - doReturn(new ByteArrayInputStream(testContent.getBytes(StandardCharsets.UTF_8))) - .when(fileServiceClient).getFile(contentUri); - proxiesService.getRecord(plugin, ecloudId); - - // When the file service client throws exception - doReturn(null).when(recordServiceClient) - .getRepresentationsByRevision(ecloudId, MetisPlugin.getRepresentationName(), - revision); - assertNull(proxiesService.getRecord(plugin, ecloudId)); - doReturn(Collections.emptyList()).when(recordServiceClient) - .getRepresentationsByRevision(ecloudId, MetisPlugin.getRepresentationName(), - revision); - assertNull(proxiesService.getRecord(plugin, ecloudId)); - doReturn(Arrays.asList(representation, representation)).when(recordServiceClient).getRepresentationsByRevision(ecloudId, - MetisPlugin.getRepresentationName(), revision); - - // When the record service client returns an exception or an unexpected list size. 
- proxiesService.getRecord(plugin, ecloudId); - when(recordServiceClient.getRepresentationsByRevision(anyString(), anyString(), - any(Revision.class))).thenThrow(MCSException.class); - assertThrows(ExternalTaskException.class, () -> proxiesService.getRecord(plugin, ecloudId)); - doReturn(Collections.singletonList(representation)).when(recordServiceClient).getRepresentationsByRevision(ecloudId, - MetisPlugin.getRepresentationName(), revision); - proxiesService.getRecord(plugin, ecloudId); - - // When the revision has an unexpected number of files - when(representation.getFiles()).thenReturn(null); - assertThrows(ExternalTaskException.class, () -> proxiesService.getRecord(plugin, ecloudId)); - when(representation.getFiles()).thenReturn(Collections.emptyList()); - assertThrows(ExternalTaskException.class, () -> proxiesService.getRecord(plugin, ecloudId)); - when(representation.getFiles()).thenReturn(Arrays.asList(file, file)); - proxiesService.getRecord(plugin, ecloudId); - when(representation.getFiles()).thenReturn(Collections.singletonList(file)); - proxiesService.getRecord(plugin, ecloudId); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirection.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirection.java deleted file mode 100644 index 656d3304af..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirection.java +++ /dev/null @@ -1,129 +0,0 @@ -package eu.europeana.metis.core.service; - -import static eu.europeana.metis.core.service.TestRedirectionBase.getIndexToPublishPluginMetadata; -import static eu.europeana.metis.core.service.TestRedirectionBase.getTestDataset; -import static eu.europeana.metis.core.service.TestRedirectionBase.getValidationExternalProperties; -import static eu.europeana.metis.core.service.TestRedirectionBase.getValidationInternalProperties; -import static 
eu.europeana.metis.core.service.TestRedirectionBase.getWorkflow; -import static eu.europeana.metis.core.service.TestRedirectionBase.getWorkflowFromNormalization; -import static eu.europeana.metis.core.service.TestRedirectionBase.getWorkflowPostReindex; -import static eu.europeana.metis.core.service.TestRedirectionBase.getWorkflowPreReindex; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.DatasetXsltDao; -import eu.europeana.metis.core.dao.DepublishRecordIdDao; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import eu.europeana.metis.exception.BadContentException; -import java.time.Instant; -import java.util.Date; -import java.util.List; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class TestRedirection { - - private WorkflowExecutionDao workflowExecutionDao; - private DataEvolutionUtils dataEvolutionUtils; - private WorkflowExecutionFactory workflowExecutionFactory; - - @BeforeEach - void setup() { - workflowExecutionDao 
= mock(WorkflowExecutionDao.class); - dataEvolutionUtils = mock(DataEvolutionUtils.class); - DatasetXsltDao datasetXsltDao = mock(DatasetXsltDao.class); - DepublishRecordIdDao depublishRecordIdDao = mock(DepublishRecordIdDao.class); - RedirectionInferrer redirectionInferrer = new RedirectionInferrer(workflowExecutionDao, dataEvolutionUtils); - - workflowExecutionFactory = new WorkflowExecutionFactory(datasetXsltDao, depublishRecordIdDao, redirectionInferrer); - } - - @Test - void redirectionReviewWithPerformRedirectsWhenAncestorRootIsDifferent() throws BadContentException { - final int priority = 0; - final Dataset dataset = getTestDataset(); - final WorkflowExecution workflowExecutionPre = getWorkflowPreReindex(dataset); - final WorkflowExecution workflowExecutionPost = getWorkflowPostReindex(dataset); - - final PluginWithExecutionId httpHarvestPluginWithExecutionId = - new PluginWithExecutionId<>("executionIdH1", - (ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.HTTP_HARVEST).get()); - - final PluginWithExecutionId httpHarvestPluginWithExecutionId2 = - new PluginWithExecutionId<>("executionIdH2", - (ExecutablePlugin) workflowExecutionPost.getMetisPluginWithType(PluginType.HTTP_HARVEST).get()); - - final PluginWithExecutionId indexToPublishPluginWithExecutionId = - new PluginWithExecutionId<>("executionIdV1", - (ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.PUBLISH).get()); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), anyBoolean())) - .thenReturn(indexToPublishPluginWithExecutionId); - final PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionIdV2", - (ExecutablePlugin) workflowExecutionPost.getMetisPluginWithType(PluginType.VALIDATION_INTERNAL).get()); - - when(dataEvolutionUtils.getRootAncestor(indexToPublishPluginWithExecutionId)) - .thenReturn(httpHarvestPluginWithExecutionId); - when(dataEvolutionUtils.getRootAncestor(predecessor)) - 
.thenReturn(httpHarvestPluginWithExecutionId2); - - final Workflow workflow = getWorkflowFromNormalization(dataset); - - workflowExecutionFactory.setValidationInternalProperties(getValidationInternalProperties()); - workflowExecutionFactory.setValidationExternalProperties(getValidationExternalProperties()); - final WorkflowExecution workflowExecution = workflowExecutionFactory.createWorkflowExecution(workflow, dataset, predecessor, - priority); - - final AbstractMetisPlugin abstractMetisPlugin = workflowExecution.getMetisPluginWithType( - PluginType.PUBLISH).get(); - assertTrue(abstractMetisPlugin.getPluginMetadata().isPerformRedirects()); - } - - @Test - void redirectionReviewWithPerformRedirectsWhenRedirectIdsPresent() throws BadContentException { - final int priority = 0; - final Dataset dataset = getTestDataset(); - final WorkflowExecution workflowExecutionPre = getWorkflowPreReindex(dataset); - - final PluginWithExecutionId indexToPublishPluginPluginWithExecutionId = - new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.PUBLISH).get()); - - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), anyBoolean())) - .thenReturn(indexToPublishPluginPluginWithExecutionId); - - final ObjectId objectId = new ObjectId(); - final Workflow workflow = getWorkflow(objectId, getIndexToPublishPluginMetadata(Date.from(Instant.now()), - ((ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.PREVIEW).get()).getPluginMetadata() - .getRevisionNamePreviousPlugin(), - ((ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.PREVIEW).get()).getPluginMetadata() - .getRevisionTimestampPreviousPlugin())); - - dataset.setDatasetIdsToRedirectFrom(List.of("253")); - final PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionId", - ((ExecutablePlugin) workflowExecutionPre.getMetisPluginWithType(PluginType.PREVIEW).get())); - - final 
WorkflowExecution workflowExecution = workflowExecutionFactory.createWorkflowExecution(workflow, dataset, predecessor, - priority); - - final AbstractMetisPlugin abstractMetisPlugin = workflowExecution.getMetisPluginWithType( - PluginType.PUBLISH).get(); - assertTrue(abstractMetisPlugin.getPluginMetadata().isPerformRedirects()); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionBase.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionBase.java deleted file mode 100644 index a6765bb13b..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionBase.java +++ /dev/null @@ -1,539 +0,0 @@ -package eu.europeana.metis.core.service; - -import eu.europeana.cloud.common.model.dps.TaskState; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.Dataset.PublicationFitness; -import eu.europeana.metis.core.workflow.ValidationProperties; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.DataStatus; -import eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.ExecutionProgress; -import eu.europeana.metis.core.workflow.plugins.HTTPHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPreviewPlugin; -import 
eu.europeana.metis.core.workflow.plugins.IndexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.IndexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.MediaProcessPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.PluginStatus; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPreviewPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ReindexToPublishPlugin; -import eu.europeana.metis.core.workflow.plugins.ReindexToPublishPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ThrottlingLevel; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import org.bson.types.ObjectId; -import org.jetbrains.annotations.NotNull; - -public class TestRedirectionBase { - - @NotNull - static Workflow getWorkflow(ObjectId objectId, IndexToPublishPluginMetadata indexToPublishPluginMetadata) { - final Workflow workflow = new Workflow(); - workflow.setDatasetId("datasetId"); - workflow.setId(objectId); - workflow.setMetisPluginsMetadata(List.of(indexToPublishPluginMetadata)); - return workflow; - } - - @NotNull - static HTTPHarvestPluginMetadata getHttpHarvestPluginMetadata() { - final HTTPHarvestPluginMetadata httpHarvestPluginMetadata = new HTTPHarvestPluginMetadata(); - httpHarvestPluginMetadata.setUrl("https://url.org"); - httpHarvestPluginMetadata.setUser("user"); - httpHarvestPluginMetadata.setIncrementalHarvest(false); - 
httpHarvestPluginMetadata.setEnabled(true); - return httpHarvestPluginMetadata; - } - - @NotNull - static ValidationExternalPluginMetadata getValidationExternalPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final ValidationExternalPluginMetadata validationExternalPluginMetadata = new ValidationExternalPluginMetadata(); - validationExternalPluginMetadata.setEnabled(true); - validationExternalPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - validationExternalPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - validationExternalPluginMetadata.setUrlOfSchemasZip("http://ftp.eanadev.org/schema_zips/europeana_schemas-20220809.zip"); - validationExternalPluginMetadata.setSchemaRootPath("EDM.xsd"); - validationExternalPluginMetadata.setSchematronRootPath("schematron/schematron.xsl"); - return validationExternalPluginMetadata; - } - - @NotNull - static TransformationPluginMetadata getTransformationPluginMetadata(Dataset dataset, String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final TransformationPluginMetadata transformationPluginMetadata = new TransformationPluginMetadata(); - transformationPluginMetadata.setEnabled(true); - transformationPluginMetadata.setCustomXslt(false); - transformationPluginMetadata.setCountry("Netherlands"); - transformationPluginMetadata.setLanguage("nl"); - transformationPluginMetadata.setDatasetName(dataset.getDatasetName()); - transformationPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - transformationPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return transformationPluginMetadata; - } - - @NotNull - static ValidationInternalPluginMetadata getValidationInternalPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final ValidationInternalPluginMetadata validationInternalPluginMetadata = new 
ValidationInternalPluginMetadata(); - validationInternalPluginMetadata.setEnabled(true); - validationInternalPluginMetadata.setUrlOfSchemasZip("http://ftp.eanadev.org/schema_zips/europeana_schemas-20220809.zip"); - validationInternalPluginMetadata.setSchemaRootPath("EDM-INTERNAL.xsd"); - validationInternalPluginMetadata.setSchematronRootPath("schematron/schematron-internal.xsl"); - validationInternalPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - validationInternalPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return validationInternalPluginMetadata; - } - - @NotNull - static NormalizationPluginMetadata getNormalizationPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final NormalizationPluginMetadata normalizationPluginMetadata = new NormalizationPluginMetadata(); - normalizationPluginMetadata.setEnabled(true); - normalizationPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - normalizationPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return normalizationPluginMetadata; - } - - @NotNull - static EnrichmentPluginMetadata getEnrichmentPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final EnrichmentPluginMetadata enrichmentPluginMetadata = new EnrichmentPluginMetadata(); - enrichmentPluginMetadata.setEnabled(true); - enrichmentPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - enrichmentPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return enrichmentPluginMetadata; - } - - @NotNull - static MediaProcessPluginMetadata getMediaProcessPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final MediaProcessPluginMetadata mediaProcessPluginMetadata = new MediaProcessPluginMetadata(); - mediaProcessPluginMetadata.setEnabled(true); - 
mediaProcessPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - mediaProcessPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - mediaProcessPluginMetadata.setThrottlingLevel(ThrottlingLevel.STRONG); - return mediaProcessPluginMetadata; - } - - @NotNull - static ReindexToPreviewPluginMetadata getReindexToPreviewPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final ReindexToPreviewPluginMetadata reindexToPreviewPluginMetadata = new ReindexToPreviewPluginMetadata(); - reindexToPreviewPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - reindexToPreviewPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return reindexToPreviewPluginMetadata; - } - - @NotNull - static ReindexToPublishPluginMetadata getReindexToPublishPluginMetadata(String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final ReindexToPublishPluginMetadata reindexToPublishPluginMetadata = new ReindexToPublishPluginMetadata(); - reindexToPublishPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - reindexToPublishPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return reindexToPublishPluginMetadata; - } - - @NotNull - static IndexToPreviewPluginMetadata getIndexToPreviewPluginMetadata(Date harvestDate, String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final IndexToPreviewPluginMetadata indexToPreviewPluginMetadata = new IndexToPreviewPluginMetadata(); - indexToPreviewPluginMetadata.setIncrementalIndexing(false); - indexToPreviewPluginMetadata.setHarvestDate(harvestDate); - indexToPreviewPluginMetadata.setPreserveTimestamps(false); - indexToPreviewPluginMetadata.setEnabled(true); - indexToPreviewPluginMetadata.setDatasetIdsToRedirectFrom(List.of()); - indexToPreviewPluginMetadata.setPerformRedirects(true); - 
indexToPreviewPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - indexToPreviewPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - - return indexToPreviewPluginMetadata; - } - - @NotNull - static IndexToPublishPluginMetadata getIndexToPublishPluginMetadata(Date harvestDate, String revisionNamePreviousPlugin, - Date revisionTimeStampPreviousPlugin) { - final IndexToPublishPluginMetadata indexToPublishPluginMetadata = new IndexToPublishPluginMetadata(); - indexToPublishPluginMetadata.setIncrementalIndexing(false); - indexToPublishPluginMetadata.setHarvestDate(harvestDate); - indexToPublishPluginMetadata.setPreserveTimestamps(false); - indexToPublishPluginMetadata.setEnabled(true); - indexToPublishPluginMetadata.setDatasetIdsToRedirectFrom(List.of()); - indexToPublishPluginMetadata.setPerformRedirects(true); - indexToPublishPluginMetadata.setRevisionNamePreviousPlugin(revisionNamePreviousPlugin); - indexToPublishPluginMetadata.setRevisionTimestampPreviousPlugin(revisionTimeStampPreviousPlugin); - return indexToPublishPluginMetadata; - } - - @NotNull - static Dataset getTestDataset() { - final Dataset dataset = new Dataset(); - dataset.setDatasetId("datasetId"); - dataset.setCountry(Country.GERMANY); - dataset.setDatasetName("dataset test name"); - dataset.setDescription(""); - dataset.setOrganizationId("1482250000001617026"); - dataset.setCreatedByUserId("1482250000016772002"); - dataset.setLanguage(Language.MUL); - dataset.setDatasetIdsToRedirectFrom(List.of()); - dataset.setOrganizationName("Europeana Foundation"); - dataset.setCreatedByUserId("userId"); - dataset.setCreatedDate(Date.from(Instant.now().minus(120, ChronoUnit.MINUTES))); - dataset.setUpdatedDate(Date.from(Instant.now())); - dataset.setReplacedBy(""); - dataset.setDataProvider("Kunsthochschule Kassel"); - dataset.setProvider("EFG"); - dataset.setIntermediateProvider(""); - dataset.setNotes(""); - 
dataset.setEcloudDatasetId("377ac607-f729-483d-a86d-2c005150c46d"); - dataset.setPublicationFitness(PublicationFitness.FIT); - return dataset; - } - - @NotNull - static ExecutionProgress getExecutionProgress() { - final ExecutionProgress executionProgress = new ExecutionProgress(); - executionProgress.setStatus(TaskState.PROCESSED); - executionProgress.setExpectedRecords(1); - executionProgress.setProcessedRecords(1); - executionProgress.setProgressPercentage(100); - executionProgress.setIgnoredRecords(0); - executionProgress.setDeletedRecords(0); - executionProgress.setErrors(0); - executionProgress.setTotalDatabaseRecords(-1); - return executionProgress; - } - - @NotNull - static AbstractExecutablePlugin getExecutablePlugin(ExecutablePluginMetadata executablePluginMetadata, - Date startDate, - Date updateDate, - Date finishDate, - DataStatus dataStatus, - String id, - ExecutionProgress executionProgress) { - final AbstractExecutablePlugin executablePlugin = - ExecutablePluginFactory.createPlugin(executablePluginMetadata); - executablePlugin.setExecutionProgress(executionProgress); - executablePlugin.setId(id); - executablePlugin.setPluginStatus(PluginStatus.FINISHED); - executablePlugin.setStartedDate(startDate); - executablePlugin.setUpdatedDate(updateDate); - executablePlugin.setFinishedDate(finishDate); - executablePlugin.setDataStatus(dataStatus); - executablePlugin.setExternalTaskId(String.valueOf(Instant.now().toEpochMilli())); - return executablePlugin; - } - - @NotNull - static List getExecutablePluginTypes() { - final List typesInWorkflow = new ArrayList<>(); - typesInWorkflow.add(ExecutablePluginType.HTTP_HARVEST); - typesInWorkflow.add(ExecutablePluginType.VALIDATION_EXTERNAL); - typesInWorkflow.add(ExecutablePluginType.TRANSFORMATION); - typesInWorkflow.add(ExecutablePluginType.VALIDATION_INTERNAL); - typesInWorkflow.add(ExecutablePluginType.NORMALIZATION); - typesInWorkflow.add(ExecutablePluginType.ENRICHMENT); - 
typesInWorkflow.add(ExecutablePluginType.MEDIA_PROCESS); - typesInWorkflow.add(ExecutablePluginType.PREVIEW); - typesInWorkflow.add(ExecutablePluginType.PUBLISH); - return typesInWorkflow; - } - - static Date getDateMinusMinutes(Date date, long minutes) { - return Date.from(Instant.from(date.toInstant()).minus(minutes, ChronoUnit.MINUTES)); - } - - @NotNull - static WorkflowExecution getWorkflowPreReindex(Dataset dataset) { - final WorkflowExecution workflowExecution = new WorkflowExecution(); - workflowExecution.setDatasetId(dataset.getDatasetId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecution.setEcloudDatasetId(dataset.getEcloudDatasetId()); - workflowExecution.setStartedBy(dataset.getCreatedByUserId()); - workflowExecution.setWorkflowPriority(0); - workflowExecution.setCancelling(false); - Date templateDate = Date.from(Instant.now()); - - workflowExecution.setCreatedDate(templateDate); - workflowExecution.setStartedDate(getDateMinusMinutes(templateDate,28)); - workflowExecution.setUpdatedDate(getDateMinusMinutes(templateDate,20)); - workflowExecution.setFinishedDate(getDateMinusMinutes(templateDate,20)); - - workflowExecution.setMetisPlugins(List.of( - getExecutablePlugin(getHttpHarvestPluginMetadata(), - getDateMinusMinutes(templateDate,28), - getDateMinusMinutes(templateDate,28), - getDateMinusMinutes(templateDate,28), - DataStatus.VALID, - "75671dea3818387b1e4bd92c-HTTP_HARVEST", - getExecutionProgress() - ), - getExecutablePlugin(getValidationExternalPluginMetadata("HTTP_HARVEST", - getDateMinusMinutes(templateDate,28)), - getDateMinusMinutes(templateDate,27), - getDateMinusMinutes(templateDate,27), - getDateMinusMinutes(templateDate,27), - DataStatus.VALID, - "75671dea3818387b1e4bd92d-VALIDATION_EXTERNAL", - getExecutionProgress() - ), - getExecutablePlugin(getTransformationPluginMetadata(dataset, "VALIDATION_EXTERNAL", - getDateMinusMinutes(templateDate,27)), - getDateMinusMinutes(templateDate,26), - 
getDateMinusMinutes(templateDate,26), - getDateMinusMinutes(templateDate,26), - DataStatus.VALID, - "75671dea3818387b1e4bd92e-TRANSFORMATION", - getExecutionProgress() - ), - getExecutablePlugin(getValidationInternalPluginMetadata("TRANSFORMATION", - getDateMinusMinutes(templateDate,26)), - getDateMinusMinutes(templateDate,25), - getDateMinusMinutes(templateDate,25), - getDateMinusMinutes(templateDate,25), - DataStatus.VALID, - "75671dea3818387b1e4bd92f-VALIDATION_INTERNAL", - getExecutionProgress() - ), - getExecutablePlugin(getNormalizationPluginMetadata("VALIDATION_INTERNAL", - getDateMinusMinutes(templateDate,25)), - getDateMinusMinutes(templateDate,24), - getDateMinusMinutes(templateDate,24), - getDateMinusMinutes(templateDate,24), - DataStatus.VALID, - "75671dea3818387b1e4bd930-NORMALIZATION", - getExecutionProgress() - ), - getExecutablePlugin(getEnrichmentPluginMetadata("NORMALIZATION", - getDateMinusMinutes(templateDate,24)), - getDateMinusMinutes(templateDate,23), - getDateMinusMinutes(templateDate,23), - getDateMinusMinutes(templateDate,23), - DataStatus.DEPRECATED, - "75671dea3818387b1e4bd931-ENRICHMENT", - getExecutionProgress() - ), - getExecutablePlugin(getMediaProcessPluginMetadata("ENRICHMENT", - getDateMinusMinutes(templateDate,23)), - getDateMinusMinutes(templateDate,22), - getDateMinusMinutes(templateDate,22), - getDateMinusMinutes(templateDate,22), - DataStatus.DEPRECATED, - "75671dea3818387b1e4bd932-MEDIA_PROCESS", - getExecutionProgress() - ), - getExecutablePlugin(getIndexToPreviewPluginMetadata( - getDateMinusMinutes(templateDate,28), - "MEDIA_PROCESS", - getDateMinusMinutes(templateDate,22)), - getDateMinusMinutes(templateDate,21), - getDateMinusMinutes(templateDate,21), - getDateMinusMinutes(templateDate,21), - DataStatus.DEPRECATED, - "75671dea3818387b1e4bd933-PREVIEW", - getExecutionProgress() - ), - getExecutablePlugin(getIndexToPublishPluginMetadata( - getDateMinusMinutes(templateDate,28), - "PREVIEW", - 
getDateMinusMinutes(templateDate,21)), - getDateMinusMinutes(templateDate,20), - getDateMinusMinutes(templateDate,20), - getDateMinusMinutes(templateDate,20), - DataStatus.DEPRECATED, - "75671dea3818387b1e4bd934-PUBLISH", - getExecutionProgress() - ) - )); - return workflowExecution; - } - - @NotNull - static WorkflowExecution getWorkflowReindex(Dataset dataset, Date harvestDate) { - final WorkflowExecution workflowExecution = new WorkflowExecution(); - workflowExecution.setDatasetId(dataset.getDatasetId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecution.setEcloudDatasetId(dataset.getEcloudDatasetId()); - workflowExecution.setStartedBy(dataset.getCreatedByUserId()); - workflowExecution.setWorkflowPriority(0); - workflowExecution.setCancelling(false); - Date templateDate = Date.from(Instant.now()); - - workflowExecution.setCreatedDate(templateDate); - workflowExecution.setStartedDate(getDateMinusMinutes(templateDate,19)); - workflowExecution.setUpdatedDate(getDateMinusMinutes(templateDate,11)); - workflowExecution.setFinishedDate(getDateMinusMinutes(templateDate,11)); - - ReindexToPreviewPluginMetadata reindexToPreviewPluginMetadata = getReindexToPreviewPluginMetadata("VALIDATION_INTERNAL",harvestDate); - ReindexToPreviewPlugin reindexToPreviewPlugin = new ReindexToPreviewPlugin(reindexToPreviewPluginMetadata); - reindexToPreviewPlugin.setStartedDate(getDateMinusMinutes(templateDate,11)); - reindexToPreviewPlugin.setUpdatedDate(getDateMinusMinutes(templateDate,11)); - reindexToPreviewPlugin.setFinishedDate(getDateMinusMinutes(templateDate,11)); - reindexToPreviewPlugin.setPluginStatus(PluginStatus.FINISHED); - reindexToPreviewPlugin.setDataStatus(DataStatus.VALID); - reindexToPreviewPlugin.setId("74b628a62d563e2ef58976d0-REINDEX_TO_PREVIEW"); - - ReindexToPublishPluginMetadata reindexToPublishPluginMetadata = getReindexToPublishPluginMetadata("REINDEX_TO_PREVIEW", getDateMinusMinutes(templateDate,11) ); - ReindexToPublishPlugin 
reindexToPublishPlugin = new ReindexToPublishPlugin(reindexToPublishPluginMetadata); - reindexToPublishPlugin.setStartedDate(getDateMinusMinutes(templateDate,11)); - reindexToPublishPlugin.setUpdatedDate(getDateMinusMinutes(templateDate,11)); - reindexToPublishPlugin.setFinishedDate(getDateMinusMinutes(templateDate,11)); - reindexToPublishPlugin.setPluginStatus(PluginStatus.FINISHED); - reindexToPublishPlugin.setDataStatus(DataStatus.VALID); - reindexToPublishPlugin.setId("74b628a62d563e2ef58976d1-REINDEX_TO_PUBLISH"); - - workflowExecution.setMetisPlugins(List.of( - reindexToPreviewPlugin, - reindexToPublishPlugin - )); - return workflowExecution; - } - - @NotNull - static WorkflowExecution getWorkflowPostReindex(Dataset dataset) { - final WorkflowExecution workflowExecution = new WorkflowExecution(); - workflowExecution.setDatasetId(dataset.getDatasetId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.FINISHED); - workflowExecution.setEcloudDatasetId(dataset.getEcloudDatasetId()); - workflowExecution.setStartedBy(dataset.getCreatedByUserId()); - workflowExecution.setWorkflowPriority(0); - workflowExecution.setCancelling(false); - Date templateDate = Date.from(Instant.now()); - - workflowExecution.setCreatedDate(templateDate); - workflowExecution.setStartedDate(getDateMinusMinutes(templateDate,10)); - workflowExecution.setUpdatedDate(getDateMinusMinutes(templateDate,9)); - workflowExecution.setFinishedDate(getDateMinusMinutes(templateDate,9)); - - workflowExecution.setMetisPlugins(List.of( - getExecutablePlugin(getHttpHarvestPluginMetadata(), - getDateMinusMinutes(templateDate,10), - getDateMinusMinutes(templateDate,10), - getDateMinusMinutes(templateDate,10), - DataStatus.VALID, - "85671dea3818387b1e4bd92c-HTTP_HARVEST", - getExecutionProgress() - ), - getExecutablePlugin(getValidationExternalPluginMetadata("HTTP_HARVEST", - getDateMinusMinutes(templateDate,10)), - getDateMinusMinutes(templateDate,8), - getDateMinusMinutes(templateDate,8), - 
getDateMinusMinutes(templateDate,8), - DataStatus.VALID, - "85671dea3818387b1e4bd92d-VALIDATION_EXTERNAL", - getExecutionProgress() - ), - getExecutablePlugin(getTransformationPluginMetadata(dataset, "VALIDATION_EXTERNAL", - getDateMinusMinutes(templateDate,8)), - getDateMinusMinutes(templateDate,7), - getDateMinusMinutes(templateDate,7), - getDateMinusMinutes(templateDate,7), - DataStatus.VALID, - "85671dea3818387b1e4bd92e-TRANSFORMATION", - getExecutionProgress() - ), - getExecutablePlugin(getValidationInternalPluginMetadata("TRANSFORMATION", - getDateMinusMinutes(templateDate,7)), - getDateMinusMinutes(templateDate,6), - getDateMinusMinutes(templateDate,6), - getDateMinusMinutes(templateDate,6), - DataStatus.VALID, - "85671dea3818387b1e4bd92f-VALIDATION_INTERNAL", - getExecutionProgress() - ), - getExecutablePlugin(getNormalizationPluginMetadata("VALIDATION_INTERNAL", - getDateMinusMinutes(templateDate,6)), - getDateMinusMinutes(templateDate,5), - getDateMinusMinutes(templateDate,5), - getDateMinusMinutes(templateDate,5), - DataStatus.VALID, - "85671dea3818387b1e4bd930-NORMALIZATION", - getExecutionProgress() - ), - getExecutablePlugin(getEnrichmentPluginMetadata("NORMALIZATION", - getDateMinusMinutes(templateDate,5)), - getDateMinusMinutes(templateDate,4), - getDateMinusMinutes(templateDate,4), - getDateMinusMinutes(templateDate,4), - DataStatus.VALID, - "85671dea3818387b1e4bd931-ENRICHMENT", - getExecutionProgress() - ), - getExecutablePlugin(getMediaProcessPluginMetadata("ENRICHMENT", - getDateMinusMinutes(templateDate,4)), - getDateMinusMinutes(templateDate,3), - getDateMinusMinutes(templateDate,3), - getDateMinusMinutes(templateDate,3), - DataStatus.VALID, - "85671dea3818387b1e4bd932-MEDIA_PROCESS", - getExecutionProgress() - ), - getExecutablePlugin(getIndexToPreviewPluginMetadata( - getDateMinusMinutes(templateDate,10), - "MEDIA_PROCESS", - getDateMinusMinutes(templateDate,3)), - getDateMinusMinutes(templateDate,2), - getDateMinusMinutes(templateDate,2), 
- getDateMinusMinutes(templateDate,2), - DataStatus.VALID, - "85671dea3818387b1e4bd933-PREVIEW", - getExecutionProgress() - ), - getExecutablePlugin(getIndexToPublishPluginMetadata( - getDateMinusMinutes(templateDate,10), - "PREVIEW", - getDateMinusMinutes(templateDate,2)), - getDateMinusMinutes(templateDate,1), - getDateMinusMinutes(templateDate,1), - getDateMinusMinutes(templateDate,1), - DataStatus.VALID, - "85671dea3818387b1e4bd934-PUBLISH", - getExecutionProgress() - ) - )); - return workflowExecution; - } - - @NotNull - static Workflow getWorkflowFromNormalization(Dataset dataset) { - final ObjectId objectId = new ObjectId(); - final Workflow workflow = new Workflow(); - workflow.setDatasetId(dataset.getDatasetId()); - workflow.setId(objectId); - workflow.setMetisPluginsMetadata(List.of( - getNormalizationPluginMetadata(null,null), - getEnrichmentPluginMetadata(null,null), - getMediaProcessPluginMetadata(null,null), - getIndexToPreviewPluginMetadata(null,null,null), - getIndexToPublishPluginMetadata(null,null,null))); - return workflow; - } - - static ValidationProperties getValidationExternalProperties() { - return new ValidationProperties("http://ftp.eanadev.org/schema_zips/europeana_schemas-20220809.zip", - "EDM.xsd", - "schematron/schematron.xsl"); - } - static ValidationProperties getValidationInternalProperties() { - return new ValidationProperties("http://ftp.eanadev.org/schema_zips/europeana_schemas-20220809.zip", - "EDM-INTERNAL.xsd", - "schematron/schematron-internal.xsl"); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionInferrer.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionInferrer.java deleted file mode 100644 index 4d0d696e45..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestRedirectionInferrer.java +++ /dev/null @@ -1,188 +0,0 @@ -package eu.europeana.metis.core.service; - -import 
static eu.europeana.metis.core.service.TestRedirectionBase.getExecutablePluginTypes; -import static eu.europeana.metis.core.service.TestRedirectionBase.getTestDataset; -import static eu.europeana.metis.core.service.TestRedirectionBase.getWorkflowPostReindex; -import static eu.europeana.metis.core.service.TestRedirectionBase.getWorkflowPreReindex; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.core.dao.DataEvolutionUtils; -import eu.europeana.metis.core.dao.PluginWithExecutionId; -import eu.europeana.metis.core.dao.WorkflowExecutionDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.plugins.ExecutablePlugin; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginType; -import eu.europeana.metis.core.workflow.plugins.PluginType; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class TestRedirectionInferrer { - - @Mock - WorkflowExecutionDao workflowExecutionDao; - - @Mock - DataEvolutionUtils dataEvolutionUtils; - - @InjectMocks - RedirectionInferrer redirectionInferrer; - - @Test - void shouldRedirectsBePerformed_whenRootAncestorDifferent_expectRedirect() { - final Dataset dataset = getTestDataset(); - final WorkflowExecution workflowExecution = getWorkflowPostReindex(dataset); - final PluginWithExecutionId indexToPublishPluginWithExecutionId = - new 
PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PUBLISH).get()); - final PluginWithExecutionId indexToPreviewPluginWithExecutionId2 = - new PluginWithExecutionId<>("executionId2", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(indexToPublishPluginWithExecutionId); - - when(dataEvolutionUtils.getRootAncestor(any())) - .thenReturn(indexToPublishPluginWithExecutionId) - .thenReturn(indexToPreviewPluginWithExecutionId2); - - final PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, predecessor, - ExecutablePluginType.PREVIEW, new ArrayList<>()); - - assertTrue(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenRootAncestorTheSame_expectNoRedirect() { - final Dataset dataset = getTestDataset(); - final WorkflowExecution workflowExecution = getWorkflowPreReindex(dataset); - - final PluginWithExecutionId indexToPublishPluginWithExecutionId = - new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PUBLISH).get()); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(indexToPublishPluginWithExecutionId); - - when(dataEvolutionUtils.getRootAncestor(any())) - .thenReturn(indexToPublishPluginWithExecutionId) - .thenReturn(indexToPublishPluginWithExecutionId); - - final PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - - boolean redirectsToBePerformed = 
redirectionInferrer.shouldRedirectsBePerformed(dataset, predecessor, - ExecutablePluginType.PREVIEW, new ArrayList<>()); - - assertFalse(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenLatestSuccessfulPluginButDatasetUpdate_expectRedirect() { - final Dataset dataset = getTestDataset(); - dataset.setUpdatedDate(Date.from(Instant.now())); - dataset.setDatasetIdsToRedirectFrom(List.of("258")); - final WorkflowExecution workflowExecution = getWorkflowPostReindex(dataset); - - final PluginWithExecutionId indexToPreviewPluginWithExecutionId = - new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(indexToPreviewPluginWithExecutionId); - final PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, predecessor, - ExecutablePluginType.PREVIEW, new ArrayList<>()); - - assertTrue(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenLatestSuccessfulPluginButNoDatasetUpdate_expectNoRedirect() { - final Dataset dataset = getTestDataset(); - final WorkflowExecution workflowExecution = getWorkflowPostReindex(dataset); - - final PluginWithExecutionId indexToPreviewPluginWithExecutionId = - new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(indexToPreviewPluginWithExecutionId); - when(dataEvolutionUtils.getRootAncestor(any())) - .thenReturn(indexToPreviewPluginWithExecutionId) - .thenReturn(indexToPreviewPluginWithExecutionId); - final 
PluginWithExecutionId predecessor = new PluginWithExecutionId<>("executionId", - (ExecutablePlugin) workflowExecution.getMetisPluginWithType(PluginType.PREVIEW).get()); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, predecessor, - ExecutablePluginType.PREVIEW, new ArrayList<>()); - - assertFalse(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenNoLatestSuccessfulPluginButDatasetRedirects_expectRedirect() { - final Dataset dataset = getTestDataset(); - dataset.setDatasetIdsToRedirectFrom(List.of("258")); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(null); - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, null, - ExecutablePluginType.PREVIEW, - new ArrayList<>()); - - assertTrue(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenNoLatestSuccessfulPluginAndNoDatasetRedirects_expectNoRedirect() { - final Dataset dataset = getTestDataset(); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(null); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, null, - ExecutablePluginType.PREVIEW, - new ArrayList<>()); - - assertFalse(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenTypesInWorkflowSameAsExecutablePluginType_expectNoRedirect() { - final Dataset dataset = getTestDataset(); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(null); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, null, ExecutablePluginType.PREVIEW, - getExecutablePluginTypes()); - - assertFalse(redirectsToBePerformed); - } - - @Test - void shouldRedirectsBePerformed_whenTypesInWorkflowLatestHarvestAndDatasetRedirect_expectRedirect() { - final Dataset 
dataset = getTestDataset(); - dataset.setDatasetIdsToRedirectFrom(List.of("258")); - when(workflowExecutionDao.getLatestSuccessfulExecutablePlugin(anyString(), any(), eq(Boolean.FALSE))) - .thenReturn(null); - - boolean redirectsToBePerformed = redirectionInferrer.shouldRedirectsBePerformed(dataset, null, - ExecutablePluginType.HTTP_HARVEST, - getExecutablePluginTypes()); - - assertTrue(redirectsToBePerformed); - } -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestScheduleWorkflowService.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestScheduleWorkflowService.java deleted file mode 100644 index 8400ca1419..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/service/TestScheduleWorkflowService.java +++ /dev/null @@ -1,316 +0,0 @@ -package eu.europeana.metis.core.service; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.dao.DatasetDao; -import eu.europeana.metis.core.dao.ScheduledWorkflowDao; -import eu.europeana.metis.core.dao.WorkflowDao; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.exceptions.NoDatasetFoundException; -import eu.europeana.metis.core.exceptions.NoScheduledWorkflowFoundException; -import eu.europeana.metis.core.exceptions.NoWorkflowFoundException; -import eu.europeana.metis.core.exceptions.ScheduledWorkflowAlreadyExistsException; -import 
eu.europeana.metis.core.utils.TestObjectFactory; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.exception.BadContentException; -import eu.europeana.metis.exception.UserUnauthorizedException; -import java.time.LocalDateTime; -import org.bson.types.ObjectId; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2018-04-05 - */ -class TestScheduleWorkflowService { - - private static ScheduledWorkflowDao scheduledWorkflowDao; - private static WorkflowDao workflowDao; - private static DatasetDao datasetDao; - private static ScheduleWorkflowService scheduleWorkflowService; - private static Authorizer authorizer; - - @BeforeAll - static void prepare() { - workflowDao = mock(WorkflowDao.class); - scheduledWorkflowDao = mock(ScheduledWorkflowDao.class); - datasetDao = mock(DatasetDao.class); - authorizer = mock(Authorizer.class); - - scheduleWorkflowService = new ScheduleWorkflowService(scheduledWorkflowDao, workflowDao, - datasetDao, authorizer); - } - - @AfterEach - void cleanUp() { - reset(workflowDao); - reset(scheduledWorkflowDao); - reset(datasetDao); - reset(authorizer); - } - - @Test - void getScheduledWorkflowByDatasetId() - throws UserUnauthorizedException, NoDatasetFoundException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - scheduleWorkflowService.getScheduledWorkflowByDatasetId(metisUserView, datasetId); - verify(scheduledWorkflowDao, times(1)).getScheduledWorkflowByDatasetId(anyString()); - verifyNoMoreInteractions(scheduledWorkflowDao); - verify(authorizer, times(1)).authorizeReadExistingDatasetById(metisUserView, datasetId); - 
verifyNoMoreInteractions(authorizer); - } - - @Test - void scheduleWorkflow() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(null); - when(scheduledWorkflowDao.create(scheduledWorkflow)) - .thenReturn(new ScheduledWorkflow(null, datasetId, null, 0)); - scheduleWorkflowService.scheduleWorkflow(metisUserView, scheduledWorkflow); - verify(authorizer, times(1)).authorizeWriteExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - } - - @Test - void scheduleWorkflow_NoDatasetFoundException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(null); - assertThrows(NoDatasetFoundException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void scheduleWorkflow_NoWorkflowFoundException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - Dataset dataset = 
TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(null); - assertThrows(NoWorkflowFoundException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void scheduleWorkflow_ScheduledWorkflowAlreadyExistsException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(new ObjectId().toString()); - assertThrows(ScheduledWorkflowAlreadyExistsException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void scheduleUserWorkflow_BadContentException_nullPointerDate() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setPointerDate(null); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(null); - 
assertThrows(BadContentException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void scheduleWorkflow_BadContentException_NULLScheduleFrequence() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setScheduleFrequence(ScheduleFrequence.NULL); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(null); - assertThrows(BadContentException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void scheduleWorkflow_BadContentException_nullScheduleFrequence() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setScheduleFrequence(null); - Dataset dataset = TestObjectFactory.createDataset(TestObjectFactory.DATASETNAME); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - when(datasetDao.getDatasetByDatasetId(datasetId)).thenReturn(dataset); - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(null); - assertThrows(BadContentException.class, () -> scheduleWorkflowService.scheduleWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void getAllScheduledWorkflows() throws 
UserUnauthorizedException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - scheduleWorkflowService.getAllScheduledWorkflows(metisUserView, ScheduleFrequence.ONCE, 0); - verify(scheduledWorkflowDao, times(1)).getAllScheduledWorkflows(any(ScheduleFrequence.class), - anyInt()); - verify(authorizer, times(1)).authorizeReadAllDatasets(metisUserView); - verifyNoMoreInteractions(authorizer); - } - - @Test - void getAllScheduledUserWorkflowsByDateRangeONCE() { - scheduleWorkflowService - .getAllScheduledWorkflowsByDateRangeONCE(LocalDateTime.now(), LocalDateTime.now(), - 0); - verify(scheduledWorkflowDao, times(1)) - .getAllScheduledWorkflowsByDateRangeONCE(any(LocalDateTime.class), - any(LocalDateTime.class), anyInt()); - } - - @Test - void updateScheduledWorkflow() throws Exception { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(new ObjectId().toString()); - when(scheduledWorkflowDao.update(scheduledWorkflow)) - .thenReturn(new ObjectId().toString()); - scheduleWorkflowService.updateScheduledWorkflow(metisUserView, scheduledWorkflow); - verify(authorizer, times(1)).authorizeWriteExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - } - - @Test - void updateScheduledUserWorkflow_NoUserWorkflowFoundException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - 
.createScheduledWorkflowObject(); - when(workflowDao.getWorkflow(datasetId)).thenReturn(null); - assertThrows(NoWorkflowFoundException.class, () -> scheduleWorkflowService.updateScheduledWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void updateScheduledWorkflow_NoScheduledWorkflowFoundException() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(null); - assertThrows(NoScheduledWorkflowFoundException.class, () -> scheduleWorkflowService.updateScheduledWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void updateScheduledWorkflow_BadContentException_nullPointerDate() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setPointerDate(null); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(new ObjectId().toString()); - assertThrows(BadContentException.class, () -> scheduleWorkflowService.updateScheduledWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void updateScheduledWorkflow_BadContentException_NULLScheduleFrequence() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow 
scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setScheduleFrequence(ScheduleFrequence.NULL); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(new ObjectId().toString()); - assertThrows(BadContentException.class, () -> scheduleWorkflowService.updateScheduledWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void updateScheduledWorkflow_BadContentException_nullScheduleFrequence() { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - ScheduledWorkflow scheduledWorkflow = TestObjectFactory - .createScheduledWorkflowObject(); - scheduledWorkflow.setScheduleFrequence(null); - Workflow workflow = TestObjectFactory.createWorkflowObject(); - - when(workflowDao.getWorkflow(datasetId)).thenReturn(workflow); - when(scheduledWorkflowDao.existsForDatasetId(datasetId)) - .thenReturn(new ObjectId().toString()); - assertThrows(BadContentException.class, () -> scheduleWorkflowService.updateScheduledWorkflow( - metisUserView, scheduledWorkflow)); - } - - @Test - void deleteScheduledWorkflow() throws UserUnauthorizedException, NoDatasetFoundException { - final MetisUserView metisUserView = TestObjectFactory.createMetisUser(TestObjectFactory.EMAIL); - final String datasetId = Integer.toString(TestObjectFactory.DATASETID); - scheduleWorkflowService.deleteScheduledWorkflow(metisUserView, datasetId); - verify(scheduledWorkflowDao, times(1)).deleteScheduledWorkflow(anyString()); - verify(authorizer, times(1)).authorizeWriteExistingDatasetById(metisUserView, datasetId); - verifyNoMoreInteractions(authorizer); - } - - @Test - void getScheduledWorkflowsPerRequest() { - scheduleWorkflowService.getScheduledWorkflowsPerRequest(); - verify(scheduledWorkflowDao, 
times(1)).getScheduledWorkflowPerRequest(); - } - -} diff --git a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/utils/TestObjectFactory.java b/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/utils/TestObjectFactory.java deleted file mode 100644 index f420bcbf0b..0000000000 --- a/metis-core/metis-core-service/src/test/java/eu/europeana/metis/core/utils/TestObjectFactory.java +++ /dev/null @@ -1,394 +0,0 @@ -package eu.europeana.metis.core.utils; - -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; - -import eu.europeana.cloud.common.model.dps.ErrorDetails; -import eu.europeana.cloud.common.model.dps.NodeStatistics; -import eu.europeana.cloud.common.model.dps.RecordState; -import eu.europeana.cloud.common.model.dps.StatisticsReport; -import eu.europeana.cloud.common.model.dps.SubTaskInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorInfo; -import eu.europeana.cloud.common.model.dps.TaskErrorsInfo; -import eu.europeana.metis.authentication.user.AccountRole; -import eu.europeana.metis.authentication.user.MetisUserView; -import eu.europeana.metis.core.common.Country; -import eu.europeana.metis.core.common.Language; -import eu.europeana.metis.core.dao.WorkflowExecutionDao.ExecutionDatasetPair; -import eu.europeana.metis.core.dataset.Dataset; -import eu.europeana.metis.core.dataset.Dataset.PublicationFitness; -import eu.europeana.metis.core.dataset.DatasetXslt; -import eu.europeana.metis.core.rest.Record; -import eu.europeana.metis.core.workflow.ScheduleFrequence; -import eu.europeana.metis.core.workflow.ScheduledWorkflow; -import eu.europeana.metis.core.workflow.Workflow; -import eu.europeana.metis.core.workflow.WorkflowExecution; -import eu.europeana.metis.core.workflow.WorkflowStatus; -import eu.europeana.metis.core.workflow.plugins.AbstractExecutablePluginMetadata; -import eu.europeana.metis.core.workflow.plugins.AbstractMetisPlugin; -import 
eu.europeana.metis.core.workflow.plugins.EnrichmentPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ExecutablePluginFactory; -import eu.europeana.metis.core.workflow.plugins.LinkCheckingPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.NormalizationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.OaipmhHarvestPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.TransformationPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationExternalPluginMetadata; -import eu.europeana.metis.core.workflow.plugins.ValidationInternalPluginMetadata; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.UUID; -import org.bson.types.ObjectId; - -/** - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2019-03-15 - */ -public class TestObjectFactory { - - public static final int DATASETID = 100; - public static final DatasetXslt DATASET_XSLT = new DatasetXslt(); - public static final String EXECUTIONID = "5a5dc67ba458bb00083d49e3"; - public static final String DATASETNAME = "datasetName"; - public static final String EMAIL = "user.metis@europeana.eu"; - public static final String AUTHORIZATION_HEADER = "Bearer 1234567890qwertyuiopasdfghjklQWE"; - public static final String TOPOLOGY_NAME = "topology_name"; - public static final long EXTERNAL_TASK_ID = 2_070_373_127_078_497_810L; - private static final int OCCURRENCES = 2; - - static { - DATASET_XSLT.setId(new ObjectId("5a9821af34f04b794dcf63df")); - } - - - private TestObjectFactory() { - } - - /** - * Create dummy workflow - * - * @return the created workflow - */ - public static Workflow createWorkflowObject() { - Workflow workflow = new Workflow(); - workflow.setDatasetId(Integer.toString(DATASETID)); - OaipmhHarvestPluginMetadata oaipmhHarvestPluginMetadata = new OaipmhHarvestPluginMetadata(); - oaipmhHarvestPluginMetadata.setUrl("http://example.com"); - 
oaipmhHarvestPluginMetadata.setEnabled(true); - ValidationExternalPluginMetadata validationExternalPluginMetadata = new ValidationExternalPluginMetadata(); - validationExternalPluginMetadata.setEnabled(true); - TransformationPluginMetadata transformationPluginMetadata = new TransformationPluginMetadata(); - transformationPluginMetadata.setEnabled(true); - ValidationInternalPluginMetadata validationInternalPluginMetadata = new ValidationInternalPluginMetadata(); - validationInternalPluginMetadata.setEnabled(true); - NormalizationPluginMetadata normalizationPluginMetadata = new NormalizationPluginMetadata(); - normalizationPluginMetadata.setEnabled(true); - LinkCheckingPluginMetadata linkCheckingPluginMetadata = new LinkCheckingPluginMetadata(); - linkCheckingPluginMetadata.setEnabled(true); - EnrichmentPluginMetadata enrichmentPluginMetadata = new EnrichmentPluginMetadata(); - enrichmentPluginMetadata.setEnabled(true); - - List abstractMetisPluginMetadata = new ArrayList<>(); - abstractMetisPluginMetadata.add(oaipmhHarvestPluginMetadata); - abstractMetisPluginMetadata.add(validationExternalPluginMetadata); - abstractMetisPluginMetadata.add(transformationPluginMetadata); - abstractMetisPluginMetadata.add(validationInternalPluginMetadata); - abstractMetisPluginMetadata.add(normalizationPluginMetadata); - abstractMetisPluginMetadata.add(linkCheckingPluginMetadata); - abstractMetisPluginMetadata.add(enrichmentPluginMetadata); - workflow.setMetisPluginsMetadata(abstractMetisPluginMetadata); - - return workflow; - } - - /** - * Create dummy workflow execution - * - * @return the created workflow execution - */ - public static WorkflowExecution createWorkflowExecutionObject() { - Dataset dataset = createDataset(DATASETNAME); - ArrayList abstractMetisPlugins = new ArrayList<>(); - AbstractMetisPlugin oaipmhHarvestPlugin = ExecutablePluginFactory - .createPlugin(new OaipmhHarvestPluginMetadata()); - abstractMetisPlugins.add(oaipmhHarvestPlugin); - AbstractMetisPlugin 
validationExternalPlugin = ExecutablePluginFactory - .createPlugin(new ValidationExternalPluginMetadata()); - abstractMetisPlugins.add(validationExternalPlugin); - - WorkflowExecution workflowExecution = new WorkflowExecution(dataset, abstractMetisPlugins, 0); - workflowExecution.setId(new ObjectId()); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setCreatedDate(new Date()); - - return workflowExecution; - } - - private static WorkflowExecution createWorkflowExecutionObject(Dataset dataset) { - WorkflowExecution workflowExecution = new WorkflowExecution(dataset, new ArrayList<>(), 0); - workflowExecution.setWorkflowStatus(WorkflowStatus.INQUEUE); - workflowExecution.setCreatedDate(new Date()); - - return workflowExecution; - } - - /** - * Create a list of dummy workflow executions. The dataset name will have a suffix number for each dataset. - * - * @param size the number of dummy workflow executions to create - * @return the created list - */ - public static List createListOfWorkflowExecutions(int size) { - return createExecutionsWithDatasets(size).stream().map(ExecutionDatasetPair::getExecution) - .toList(); - } - - /** - * Create a list of dummy execution overviews. The dataset name will have a suffix number for each dataset. 
- * - * @param size the number of dummy execution overviews to create - * @return the created list - */ - public static List createExecutionsWithDatasets(int size) { - final List result = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - Dataset dataset = createDataset(String.format("%s%s", DATASETNAME, i)); - dataset.setId(new ObjectId(new Date(i))); - dataset.setDatasetId(Integer.toString(DATASETID + i)); - WorkflowExecution workflowExecution = createWorkflowExecutionObject(dataset); - workflowExecution.setId(new ObjectId()); - result.add(new ExecutionDatasetPair(dataset, workflowExecution)); - } - return result; - } - - /** - * Create a dummy scheduled workflow - * - * @return the created scheduled workflow - */ - public static ScheduledWorkflow createScheduledWorkflowObject() { - ScheduledWorkflow scheduledWorkflow = new ScheduledWorkflow(); - scheduledWorkflow.setDatasetId(Integer.toString(DATASETID)); - scheduledWorkflow.setPointerDate(new Date()); - scheduledWorkflow.setScheduleFrequence(ScheduleFrequence.ONCE); - scheduledWorkflow.setWorkflowPriority(0); - return scheduledWorkflow; - } - - /** - * Create a list of dummy scheduled workflows. The dataset name will have a suffix number for each dataset. - * - * @param size the number of dummy scheduled workflows to create - * @return the created list - */ - public static List createListOfScheduledWorkflows(int size) { - List scheduledWorkflows = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ScheduledWorkflow scheduledWorkflow = createScheduledWorkflowObject(); - scheduledWorkflow.setId(new ObjectId()); - scheduledWorkflow.setDatasetId(Integer.toString(DATASETID + i)); - scheduledWorkflows.add(scheduledWorkflow); - } - return scheduledWorkflows; - } - - /** - * Create a list of dummy scheduled workflows with pointer date and frequency. The dataset name will have a suffix number for - * each dataset. 
- * - * @param size the number of dummy scheduled workflows to create - * @param date the pointer date - * @param scheduleFrequence the schedule frequence - * @return the created list - */ - public static List createListOfScheduledWorkflowsWithDateAndFrequence( - int size, Date date, ScheduleFrequence scheduleFrequence) { - List scheduledWorkflows = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ScheduledWorkflow scheduledWorkflow = createScheduledWorkflowObject(); - scheduledWorkflow.setId(new ObjectId()); - scheduledWorkflow.setDatasetId(Integer.toString(DATASETID + i)); - scheduledWorkflow.setPointerDate(date); - scheduledWorkflow.setScheduleFrequence(scheduleFrequence); - scheduledWorkflows.add(scheduledWorkflow); - } - return scheduledWorkflows; - } - - /** - * Create a dummy dataset - * - * @param datasetName the dataset name to be used - * @return the created dataset - */ - public static Dataset createDataset(String datasetName) { - Dataset ds = new Dataset(); - ds.setEcloudDatasetId("NOT_CREATED_YET-f525f64c-fea0-44bf-8c56-88f30962734c"); - ds.setDatasetId(Integer.toString(DATASETID)); - ds.setDatasetName(datasetName); - final String organizationId = "1234567890"; - ds.setOrganizationId(organizationId); - ds.setOrganizationName("OrganizationName"); - ds.setProvider(organizationId); - ds.setIntermediateProvider(organizationId); - ds.setDataProvider(organizationId); - ds.setCreatedByUserId("userId"); - ds.setCreatedDate(new Date()); - ds.setUpdatedDate(new Date()); - ds.setReplacedBy("replacedBy"); - ds.setReplaces("12345"); - ds.setCountry(Country.GREECE); - ds.setLanguage(Language.AR); - ds.setDescription("description"); - ds.setPublicationFitness(PublicationFitness.PARTIALLY_FIT); - ds.setNotes("Notes"); - return ds; - } - - /** - * Create a dummy metis user - * - * @param email the email for the dummy user - * @return the created metis user - */ - public static MetisUserView createMetisUser(String email) { - MetisUserView metisUserView = 
spy(new MetisUserView()); - doReturn(email).when(metisUserView).getEmail(); - doReturn(AccountRole.EUROPEANA_DATA_OFFICER).when(metisUserView).getAccountRole(); - doReturn("Organization_12345").when(metisUserView).getOrganizationId(); - doReturn("OrganizationName").when(metisUserView).getOrganizationName(); - doReturn(true).when(metisUserView).isMetisUserFlag(); - doReturn("FirstName").when(metisUserView).getFirstName(); - doReturn("LastName").when(metisUserView).getLastName(); - doReturn("User_12345").when(metisUserView).getUserId(); - return metisUserView; - } - - /** - * Create a dummy subtask info - * - * @return the created subtask info - */ - public static List createListOfSubTaskInfo() { - SubTaskInfo subTaskInfo1 = new SubTaskInfo(1, "some_resource_id1", RecordState.SUCCESS, "info", - "additional info", "europeanaId", 0L); - SubTaskInfo subTaskInfo2 = new SubTaskInfo(2, "some_resource_id2", RecordState.SUCCESS, "info", - "additional info", "europeanaId", 0L); - ArrayList subTaskInfos = new ArrayList<>(); - subTaskInfos.add(subTaskInfo1); - subTaskInfos.add(subTaskInfo2); - return subTaskInfos; - } - - /** - * Create a task errors info object, which contains a list of {@link TaskErrorInfo} objects. - * - * @param numberOfErrorTypes the number of dummy error types - * @return the created task errors info - */ - public static TaskErrorsInfo createTaskErrorsInfoListWithoutIdentifiers(int numberOfErrorTypes) { - ArrayList taskErrorInfos = new ArrayList<>(); - for (int i = 0; i < numberOfErrorTypes; i++) { - TaskErrorInfo taskErrorInfo = new TaskErrorInfo("be39ef50-f77d-11e7-af0f-fa163e77119a", - String.format("Error%s", i), OCCURRENCES); - taskErrorInfos.add(taskErrorInfo); - } - return new TaskErrorsInfo(EXTERNAL_TASK_ID, taskErrorInfos); - } - - /** - * Create a task errors info object, which contains a list of {@link TaskErrorInfo} objects. These will also contain a list of - * {@link ErrorDetails} that in turn contain dummy identifiers. 
- * - * @param numberOfErrorTypes the number of dummy error types - * @return the created task errors info - */ - public static TaskErrorsInfo createTaskErrorsInfoListWithIdentifiers(int numberOfErrorTypes) { - ArrayList taskErrorInfos = new ArrayList<>(); - for (int i = 0; i < numberOfErrorTypes; i++) { - TaskErrorInfo taskErrorInfo = new TaskErrorInfo("be39ef50-f77d-11e7-af0f-fa163e77119a", - String.format("Error%s", i), OCCURRENCES); - ArrayList errorDetails = new ArrayList<>(); - errorDetails.add(new ErrorDetails("identifier1", "error1")); - errorDetails.add(new ErrorDetails("identifier2", "error2")); - taskErrorInfo.setErrorDetails(errorDetails); - taskErrorInfos.add(taskErrorInfo); - } - return new TaskErrorsInfo(EXTERNAL_TASK_ID, taskErrorInfos); - } - - /** - * Create a task errors info object, which contains a list of {@link TaskErrorInfo} objects. These will also contain a list of - * {@link ErrorDetails} that in turn contain dummy identifiers. - * - * @param errorType the error type to be used for the internal {@link TaskErrorInfo} - * @param message the message type to be used for the internal {@link TaskErrorInfo} - * @return the created task errors info - */ - public static TaskErrorsInfo createTaskErrorsInfoWithIdentifiers(String errorType, - String message) { - ArrayList errorDetails = new ArrayList<>(); - errorDetails.add(new ErrorDetails("identifier1", "error1")); - errorDetails.add(new ErrorDetails("identifier2", "error2")); - TaskErrorInfo taskErrorInfo1 = new TaskErrorInfo(errorType, - message, OCCURRENCES, errorDetails); - ArrayList taskErrorInfos = new ArrayList<>(); - taskErrorInfos.add(taskErrorInfo1); - - return new TaskErrorsInfo(EXTERNAL_TASK_ID, taskErrorInfos); - } - - /** - * Create a dummy {@link StatisticsReport} - * - * @return the created report - */ - public static StatisticsReport createTaskStatisticsReport() { - List nodeStatistics = new ArrayList<>(); - nodeStatistics.add(new NodeStatistics("parentpath1", "path1", "value1", 
1)); - nodeStatistics.add(new NodeStatistics("parentpath2", "path2", "value2", OCCURRENCES)); - return new StatisticsReport(EXTERNAL_TASK_ID, nodeStatistics); - } - - /** - * Create a dummy dataset xslt. The xslt copies a record. - * - * @param dataset the dataset to be used for the creation of the {@link DatasetXslt} - * @return the created dataset xslt - */ - public static DatasetXslt createXslt(Dataset dataset) { - DatasetXslt datasetXslt = new DatasetXslt(dataset.getDatasetId(), - "\n" - + "\n" - + "\n" - + "\n" - + "\n" - + ""); - datasetXslt.setId(new ObjectId()); - return datasetXslt; - } - - /** - * Create a dummy list of {@link Record}s - * - * @param numberOfRecords the number of records to create - * @return the created list of records - */ - public static List createListOfRecords(int numberOfRecords) { - List records = new ArrayList<>(numberOfRecords); - for (int i = 0; i < numberOfRecords; i++) { - String domain = String.format("http://some.domain.com/id/path/%s", i); - records.add(new Record(UUID.randomUUID().toString(), - "\n" - + "\n" - + "\t\n" - + "\t\n" - + "\n")); - } - return records; - } - -} diff --git a/metis-core/pom.xml b/metis-core/pom.xml deleted file mode 100644 index 78d9a87bb3..0000000000 --- a/metis-core/pom.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - 4.0.0 - - metis-framework - eu.europeana.metis - 12.2 - - metis-core - pom - - - UTF-8 - - - - metis-core-common - metis-core-service - metis-core-rest - - \ No newline at end of file diff --git a/metis-debias/README.MD b/metis-debias/README.MD new file mode 100644 index 0000000000..456847a869 --- /dev/null +++ b/metis-debias/README.MD @@ -0,0 +1,12 @@ +# Getting Started + +### Reference Documentation + +This module includes DeBias detection algorithm functionality for Metis services. 
+The module includes the following submodels: + +| Module | Functionality | +|--------------------------|-------------------------------------------| +| metis-debias-detect-rest | REST client for debias detection REST API | + + diff --git a/metis-debias/metis-debias-detect-rest/.gitignore b/metis-debias/metis-debias-detect-rest/.gitignore new file mode 100644 index 0000000000..9d8c95f3c0 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/.gitignore @@ -0,0 +1,15 @@ + +/src/main/resources/application.properties + +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + + diff --git a/metis-debias/metis-debias-detect-rest/pom.xml b/metis-debias/metis-debias-detect-rest/pom.xml new file mode 100644 index 0000000000..3d5c65224d --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/pom.xml @@ -0,0 +1,118 @@ + + + 4.0.0 + + metis-debias + eu.europeana.metis + 13 + + metis-debias-detect-rest + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + eu.europeana.metis + metis-common-utils + ${project.version} + + + eu.europeana.metis + metis-common-spring-properties + ${project.version} + + + org.springframework.boot + spring-boot-starter-actuator + + + org.springframework.boot + spring-boot-starter-log4j2 + + + org.springframework.boot + spring-boot-configuration-processor + true + + + co.elastic.apm + apm-agent-attach + ${version.elastic.apm} + runtime + + + org.springdoc + springdoc-openapi-starter-webmvc-ui + ${version.springdoc-openapi-starter-webmvc-ui} + + + org.springframework.boot + spring-boot-starter-test + test + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + + + org.mockito + mockito-core + + + org.springframework + spring-test + + + eu.europeana.metis + metis-debias-detect-service + 13 + compile + + + org.wiremock + wiremock-standalone + 
${version.org.wiremock} + test + + + + + + + + org.springframework.boot + spring-boot-dependencies + ${version.spring.boot} + pom + import + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${version.spring.boot} + + + + repackage + + + + + + + diff --git a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/Application.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/Application.java similarity index 65% rename from metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/Application.java rename to metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/Application.java index 79a585b3d5..85f1211cff 100644 --- a/metis-core/metis-core-rest/src/main/java/eu/europeana/metis/core/rest/Application.java +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/Application.java @@ -1,20 +1,21 @@ -package eu.europeana.metis.core.rest; +package eu.europeana.metis.debias.detect.rest; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; /** - * The Spring boot application entry point. + * The type Application. */ @SpringBootApplication public class Application { /** - * The main spring boot method. + * The entry point of application. 
* - * @param args application arguments + * @param args the input arguments */ public static void main(String[] args) { SpringApplication.run(Application.class, args); } + } diff --git a/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/ApplicationConfiguration.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/ApplicationConfiguration.java new file mode 100644 index 0000000000..a880e1a65b --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/ApplicationConfiguration.java @@ -0,0 +1,78 @@ +package eu.europeana.metis.debias.detect.rest.config; + +import eu.europeana.metis.debias.detect.client.DeBiasClient; +import eu.europeana.metis.debias.detect.service.BiasDetectService; +import eu.europeana.metis.utils.CustomTruststoreAppender; +import eu.europeana.metis.utils.CustomTruststoreAppender.TrustStoreConfigurationException; +import eu.europeana.metis.utils.apm.ElasticAPMConfiguration; +import java.lang.invoke.MethodHandles; +import metis.common.config.properties.TruststoreConfigurationProperties; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +/** + * The type Application configuration. 
+ */ +@Configuration +@EnableConfigurationProperties({ElasticAPMConfiguration.class, TruststoreConfigurationProperties.class}) +@ComponentScan(basePackages = { + "eu.europeana.metis.debias.detect.rest"}) +public class ApplicationConfiguration { + + private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Value("${debias.simple.client.detect-url}") + private String detectUrl; + + @Value("${debias.simple.client.connect-timeout}") + private int connectTimeOut; + + @Value("${debias.simple.client.request-timeout}") + private int requestTimeOut; + + /** + * Instantiates a new Application configuration. + * + * @param truststoreConfigurationProperties the truststore configuration properties + * @throws TrustStoreConfigurationException the trust store configuration exception + */ + @Autowired + public ApplicationConfiguration(TruststoreConfigurationProperties truststoreConfigurationProperties) + throws TrustStoreConfigurationException { + ApplicationConfiguration.initializeTruststore(truststoreConfigurationProperties); + } + + /** + * Detect service detect service. + * + * @return the detect service + */ + @Bean + public BiasDetectService detectService() { + return new DeBiasClient(this.detectUrl, this.connectTimeOut, this.requestTimeOut); + } + + /** + * Initialize truststore. 
+ * + * @param truststoreConfigurationProperties the truststore configuration properties + * @throws TrustStoreConfigurationException the trust store configuration exception + */ + static void initializeTruststore(TruststoreConfigurationProperties truststoreConfigurationProperties) + throws TrustStoreConfigurationException { + if (StringUtils.isNotEmpty(truststoreConfigurationProperties.getPath()) && StringUtils + .isNotEmpty(truststoreConfigurationProperties.getPassword())) { + CustomTruststoreAppender + .appendCustomTruststoreToDefault(truststoreConfigurationProperties.getPath(), + truststoreConfigurationProperties.getPassword()); + LOGGER.info("Custom truststore appended to default truststore"); + } + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/SwaggerConfig.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/SwaggerConfig.java new file mode 100644 index 0000000000..d78459f086 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/config/SwaggerConfig.java @@ -0,0 +1,31 @@ +package eu.europeana.metis.debias.detect.rest.config; + +import io.swagger.v3.oas.models.OpenAPI; +import io.swagger.v3.oas.models.info.Info; +import io.swagger.v3.oas.models.info.License; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Config for Swagger documentation + */ +@Configuration +public class SwaggerConfig { + + /** + * The open api documentation docket. 
+ * + * @return the docket configuration + */ + @Bean + public OpenAPI openAPI() { + return new OpenAPI() + .info(new Info() + .title("DeBias REST API") + .description("DeBias REST API for Europeana") + .version("v1") + .license(new License() + .name("EUPL License v1.2") + .url("https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12"))); + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/controller/DetectionController.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/controller/DetectionController.java new file mode 100644 index 0000000000..3b383a16f9 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/controller/DetectionController.java @@ -0,0 +1,49 @@ +package eu.europeana.metis.debias.detect.rest.controller; + +import eu.europeana.metis.debias.detect.model.DeBiasResult; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; +import eu.europeana.metis.debias.detect.model.response.DetectionDeBiasResult; +import eu.europeana.metis.debias.detect.service.BiasDetectService; +import eu.europeana.metis.utils.RestEndpoints; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; + +/** + * The type Detection controller. + */ +@RestController +public class DetectionController { + + /** + * The Detect service. + */ + BiasDetectService biasDetectService; + + /** + * Instantiates a new Detection controller. 
+ * + * @param biasDetectService the detect service + */ + @Autowired + public DetectionController(BiasDetectService biasDetectService) { + this.biasDetectService = biasDetectService; + } + + /** + * DeBias detection result. + * + * @param biasInputLiterals {@link BiasInputLiterals} the detection parameter + * @return {@link DetectionDeBiasResult} response of result + */ + @PostMapping(value = RestEndpoints.DEBIAS_DETECTION, consumes = MediaType.APPLICATION_JSON_VALUE, produces = { + MediaType.APPLICATION_JSON_VALUE}) + @Operation(description = "DeBias a list of values", responses = {@ApiResponse(responseCode = "200"),@ApiResponse(responseCode = "422")}) + public DeBiasResult debias(@RequestBody BiasInputLiterals biasInputLiterals) { + return biasDetectService.detect(biasInputLiterals); + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ExceptionResponseHandler.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ExceptionResponseHandler.java new file mode 100644 index 0000000000..0713693feb --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ExceptionResponseHandler.java @@ -0,0 +1,113 @@ +package eu.europeana.metis.debias.detect.rest.exceptions; + +import eu.europeana.metis.debias.detect.exceptions.DeBiasBadRequestException; +import eu.europeana.metis.debias.detect.exceptions.DeBiasInternalServerException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import org.springframework.core.annotation.AnnotationUtils; +import org.springframework.http.HttpStatus; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.web.HttpMediaTypeNotSupportedException; +import org.springframework.web.bind.annotation.ControllerAdvice; +import 
org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.ResponseStatus; + +/** + * The type Exception response handler. + */ +@ControllerAdvice +public class ExceptionResponseHandler { + + /** + * Handle generic server error. + * + * @param response the response + * @param request the request + * @param exception the exception + * @return the server error + */ + @ResponseBody + @ExceptionHandler({Exception.class}) + public ServerError handleResponse(HttpServletResponse response, HttpServletRequest request, + Exception exception) { + final ResponseStatus annotationResponseStatus = AnnotationUtils + .findAnnotation(exception.getClass(), ResponseStatus.class); + HttpStatus status = annotationResponseStatus == null ? HttpStatus.INTERNAL_SERVER_ERROR + : annotationResponseStatus.value(); + response.setStatus(status.value()); + return new ServerError(response.getStatus(), exception.getMessage()); + } + + /** + * Handle Bad Request response server error. + * + * @param response the response + * @param request the request + * @param exception the exception + * @return the server error + */ + @ResponseBody + @ExceptionHandler({DeBiasBadRequestException.class}) + public ServerError handleResponse(HttpServletResponse response, HttpServletRequest request, + DeBiasBadRequestException exception) { + final ResponseStatus annotationResponseStatus = AnnotationUtils + .findAnnotation(exception.getClass(), ResponseStatus.class); + HttpStatus status = annotationResponseStatus == null ? HttpStatus.INTERNAL_SERVER_ERROR + : annotationResponseStatus.value(); + response.setStatus(status.value()); + return new ServerError(response.getStatus(), exception.getMessage()); + } + + /** + * Handle Internal Server response server error. 
+ * + * @param response the response + * @param request the request + * @param exception the exception + * @return the server error + */ + @ResponseBody + @ExceptionHandler({DeBiasInternalServerException.class}) + public ServerError handleResponse(HttpServletResponse response, HttpServletRequest request, + DeBiasInternalServerException exception) { + final ResponseStatus annotationResponseStatus = AnnotationUtils + .findAnnotation(exception.getClass(), ResponseStatus.class); + HttpStatus status = annotationResponseStatus == null ? HttpStatus.INTERNAL_SERVER_ERROR + : annotationResponseStatus.value(); + response.setStatus(status.value()); + return new ServerError(response.getStatus(), exception.getMessage()); + } + + /** + * Handle media not supported response server error. + * + * @param response the response + * @param request the request + * @param exception the exception + * @return the server error + */ + @ResponseBody + @ExceptionHandler(HttpMediaTypeNotSupportedException.class) + public ServerError handleResponse(HttpServletResponse response, HttpServletRequest request, + HttpMediaTypeNotSupportedException exception) { + response.setStatus(HttpStatus.BAD_REQUEST.value()); + return new ServerError(response.getStatus(), exception.getMessage()); + } + + /** + * Handle response server error. 
+ * + * @param response the response + * @param request the request + * @param exception the exception + * @return the server error + */ + @ResponseBody + @ExceptionHandler(HttpMessageNotReadableException.class) + public ServerError handleResponse(HttpServletResponse response, HttpServletRequest request, + HttpMessageNotReadableException exception) { + response.setStatus(HttpStatus.BAD_REQUEST.value()); + return new ServerError(response.getStatus(), exception.getMessage()); + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ServerError.java b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ServerError.java new file mode 100644 index 0000000000..6218b53141 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/java/eu/europeana/metis/debias/detect/rest/exceptions/ServerError.java @@ -0,0 +1,64 @@ +package eu.europeana.metis.debias.detect.rest.exceptions; + +/** + * The type Server error. + */ +public class ServerError { + + private int statusCode; + private String errorMessage; + + /** + * Instantiates a new Server error. + */ + public ServerError() { + // Required for serialization and deserialization + } + + /** + * Instantiates a new Server error. + * + * @param statusCode the status code + * @param errorMessage the error message + */ + public ServerError(int statusCode, String errorMessage) { + this.statusCode = statusCode; + this.errorMessage = errorMessage; + } + + /** + * Gets error message. + * + * @return the error message + */ + public String getErrorMessage() { + return errorMessage; + } + + /** + * Sets error message. + * + * @param errorMessage the error message + */ + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + /** + * Gets status code. + * + * @return the status code + */ + public int getStatusCode() { + return statusCode; + } + + /** + * Sets status code. 
+ * + * @param statusCode the status code + */ + public void setStatusCode(int statusCode) { + this.statusCode = statusCode; + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/main/resources/application.properties.example b/metis-debias/metis-debias-detect-rest/src/main/resources/application.properties.example new file mode 100644 index 0000000000..403f5bf4e3 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/main/resources/application.properties.example @@ -0,0 +1,38 @@ +# Spring +spring.application.name=metis-debias-rest +logging.config=/data/logging/log4j2.xml +#logging.config=log4j2.xml +spring.servlet.multipart.max-file-size=5MB +spring.servlet.multipart.max-request-size=5MB +spring.autoconfigure.exclude=\ + org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration, \ + org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration +springdoc.packages-to-scan=eu.europeana.metis.debias.detect.rest +springdoc.paths-to-match=/** + +# Truststore +truststore.path= +truststore.password= + +# DeBias detect +debias.simple.client.detect-url = +debias.simple.client.connect-timeout = +debias.simple.client.request-timeout = + +#Actuator +management.endpoint.health.probes.enabled=true +management.health.livenessState.enabled=true +management.health.readinessState.enabled=true + +# Elastic APM +elastic.apm.enabled=false +elastic.apm.recording=true +elastic.apm.instrument=true +elastic.apm.service_name=metis-debias +elastic.apm.server_url=https://logstash-apm.eanadev.org:8200 +elastic.apm.environment=local +elastic.apm.application_packages=eu.europeana +elastic.apm.log_level=ERROR +elastic.apm.capture_body=all +elastic.apm.capture_headers=true +elastic.apm.metrics_interval=5s diff --git a/metis-core/metis-core-rest/src/main/resources/log4j2.xml b/metis-debias/metis-debias-detect-rest/src/main/resources/log4j2.xml similarity index 100% rename from metis-core/metis-core-rest/src/main/resources/log4j2.xml rename to 
metis-debias/metis-debias-detect-rest/src/main/resources/log4j2.xml diff --git a/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTest.java b/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTest.java new file mode 100644 index 0000000000..eaaea4731f --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTest.java @@ -0,0 +1,188 @@ +package eu.europeana.metis.debias.detect.rest.controller; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.europeana.metis.debias.detect.client.DeBiasClient; +import eu.europeana.metis.debias.detect.exceptions.DeBiasBadRequestException; +import eu.europeana.metis.debias.detect.exceptions.DeBiasInternalServerException; +import eu.europeana.metis.debias.detect.rest.exceptions.ExceptionResponseHandler; +import eu.europeana.metis.debias.detect.model.error.Detail; +import eu.europeana.metis.debias.detect.model.error.ErrorDeBiasResult; +import eu.europeana.metis.debias.detect.model.error.Input; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; +import eu.europeana.metis.debias.detect.model.response.DetectionDeBiasResult; +import eu.europeana.metis.debias.detect.model.response.Metadata; +import eu.europeana.metis.debias.detect.model.response.Tag; +import 
eu.europeana.metis.debias.detect.model.response.ValueDetection; +import eu.europeana.metis.debias.detect.service.BiasDetectService; +import eu.europeana.metis.utils.RestEndpoints; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; +import org.springframework.test.web.servlet.result.MockMvcResultHandlers; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; + +class DetectionControllerTest { + + private MockMvc mockMvc; + private BiasDetectService biasDetectService; + + private static String getDetectionParameterJson() throws JsonProcessingException { + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + biasInputLiterals.setLanguage("en"); + ObjectMapper mapper = new ObjectMapper(); + + return mapper.writeValueAsString(biasInputLiterals); + } + + @BeforeEach + void setUp() { + biasDetectService = mock(DeBiasClient.class); + DetectionController detectionController = new DetectionController(biasDetectService); + mockMvc = MockMvcBuilders.standaloneSetup(detectionController) + .setControllerAdvice(new ExceptionResponseHandler()) + .alwaysDo(MockMvcResultHandlers.print()) + .build(); + } + + @Test + void debias_detect_completeRequest_expectSuccess() throws Exception { + DetectionDeBiasResult detectionResult = new DetectionDeBiasResult(); + ValueDetection valueDetection1 = new ValueDetection(); + valueDetection1.setLanguage("en"); + valueDetection1.setLiteral("sample title of aboriginal and addict"); + Tag tag1 = new Tag(); + tag1.setLength(10); + tag1.setStart(16); + tag1.setEnd(26); + tag1.setUri("http://www.example.org/debias#t_2_en"); + Tag tag2 = new 
Tag(); + tag2.setStart(31); + tag2.setEnd(37); + tag2.setUri("http://www.example.org/debias#t_3_en"); + tag2.setLength(6); + valueDetection1.setTags(List.of(tag1, tag2)); + + detectionResult.setDetections(List.of(valueDetection1)); + Metadata metadata = new Metadata(); + detectionResult.setMetadata(metadata); + + String detectionParameterJson = getDetectionParameterJson(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJson = mapper.writeValueAsString(detectionResult); + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenReturn(detectionResult); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(200)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().string(expectedJson)); + } + + @Test + void debias_detect_NoLanguageRequest_expectSuccess() throws Exception { + ErrorDeBiasResult errorResult = new ErrorDeBiasResult(); + Detail detail = new Detail(); + Input input = new Input(); + input.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + detail.setInput(input); + detail.setLoc(List.of("body", "language")); + detail.setMsg("Field required"); + detail.setUrl("https://errors.pydantic.dev/2.5/v/missing"); + errorResult.setDetailList(List.of()); + + String detectionParameterJson = getDetectionParameterJson(); + ObjectMapper mapper = new ObjectMapper(); + String expectedJson = mapper.writeValueAsString(errorResult); + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenReturn(errorResult); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(200)) + 
.andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().string(expectedJson)); + } + + @Test + void debias_detect_emptyContentTypeRequest_expectContentTypeNotSupported() throws Exception { + DetectionDeBiasResult detectionResult = new DetectionDeBiasResult(); + detectionResult.setDetections(List.of()); + Metadata metadata = new Metadata(); + detectionResult.setMetadata(metadata); + + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenReturn(detectionResult); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .characterEncoding("utf-8")) + .andExpect(status().is(400)) + .andExpect(jsonPath("$.errorMessage", containsString("Content-Type is not supported"))); + } + + @Test + void debias_detect_noBodyRequest_expectBadRequest() throws Exception { + DetectionDeBiasResult detectionResult = new DetectionDeBiasResult(); + detectionResult.setDetections(List.of()); + Metadata metadata = new Metadata(); + detectionResult.setMetadata(metadata); + + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenReturn(detectionResult); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .content("") + .characterEncoding("utf-8")) + .andExpect(status().is(400)) + .andExpect(jsonPath("$.errorMessage", containsString("Required request body is missing"))); + } + + @Test + void debias_detect_expectBadRequest() throws Exception { + String detectionParameterJson = getDetectionParameterJson(); + + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenThrow(new DeBiasBadRequestException("Unprocessable Entity")); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .content(detectionParameterJson) + .characterEncoding("utf-8")) + .andExpect(status().is(400)) + .andExpect(jsonPath("$.errorMessage", containsString("Unprocessable Entity"))); + } + + @Test + void 
debias_detect_expectInternalServerError() throws Exception { + String detectionParameterJson = getDetectionParameterJson(); + + when(biasDetectService.detect(any(BiasInputLiterals.class))).thenThrow( + new DeBiasInternalServerException("Internal Server Error")); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .content(detectionParameterJson) + .characterEncoding("utf-8")) + .andExpect(status().is(500)) + .andExpect(jsonPath("$.errorMessage", containsString("Internal Server Error"))); + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTestIT.java b/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTestIT.java new file mode 100644 index 0000000000..18cd324fea --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/java/eu/europeana/metis/debias/detect/rest/controller/DetectionControllerTestIT.java @@ -0,0 +1,226 @@ +package eu.europeana.metis.debias.detect.rest.controller; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.common.ConsoleNotifier; +import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; +import eu.europeana.metis.debias.detect.client.DeBiasClient; 
+import eu.europeana.metis.debias.detect.rest.exceptions.ExceptionResponseHandler; +import eu.europeana.metis.debias.detect.service.BiasDetectService; +import eu.europeana.metis.utils.RestEndpoints; +import java.util.List; +import java.util.Objects; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; +import org.springframework.test.web.servlet.result.MockMvcResultHandlers; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; + +class DetectionControllerTestIT { + + private static final String DEBIAS_HOST = "debias.host"; + private static WireMockServer wireMockServer; + private MockMvc mockMvc; + + @BeforeAll + static void createWireMock() { + wireMockServer = new WireMockServer(wireMockConfig() + .dynamicPort() + .enableBrowserProxying(true) + .notifier(new ConsoleNotifier(true))); + wireMockServer.start(); + + JvmProxyConfigurer.configureFor(wireMockServer); + } + + @AfterAll + static void tearDownWireMock() { + wireMockServer.stop(); + } + + @BeforeEach + void setUp() { + final BiasDetectService biasDetectService = new DeBiasClient("http://debias.host", 300, 300); + final DetectionController detectionController = new DetectionController(biasDetectService); + mockMvc = MockMvcBuilders.standaloneSetup(detectionController) + .setControllerAdvice(new ExceptionResponseHandler()) + .alwaysDo(MockMvcResultHandlers.print()) + .build(); + } + + @Test + void detection_successResponse() throws Exception { + final String successResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_success_response.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + 
.withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withBody(successResponse) + .withStatus(200))); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + biasInputLiterals.setLanguage("en"); + ObjectMapper mapper = new ObjectMapper(); + String detectionParameterJson = mapper.writeValueAsString(biasInputLiterals); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(200)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().json(successResponse)); + } + + @Test + void detection_error_null_language_successResponse() throws Exception { + final String errorResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_null_language.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withStatus(422) + .withBody(errorResponse) + )); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + biasInputLiterals.setLanguage(null); + ObjectMapper mapper = new ObjectMapper(); + String detectionParameterJson = mapper.writeValueAsString(biasInputLiterals); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(400)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + 
.andExpect(content().json( + "{\"statusCode\":400,\"errorMessage\":\"422 UNPROCESSABLE_ENTITY string_type Input should be a valid string\"}")); + } + + @Test + void detection_error_null_values_successResponse() throws Exception { + final String errorResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_null_values.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withStatus(422) + .withBody(errorResponse) + )); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setValues(null); + biasInputLiterals.setLanguage("en"); + ObjectMapper mapper = new ObjectMapper(); + String detectionParameterJson = mapper.writeValueAsString(biasInputLiterals); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(400)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().json( + "{\"statusCode\":400,\"errorMessage\":\"422 UNPROCESSABLE_ENTITY list_type Input should be a valid list\"}")); + } + + @Test + void detection_error_null_body_successResponse() throws Exception { + String errorResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_null_body.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withStatus(422) + .withBody(errorResponse) + )); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content("{}")) + .andExpect(status().is(400)) + 
.andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect( + content().json("{\"statusCode\":400,\"errorMessage\":\"422 UNPROCESSABLE_ENTITY missing Field required\"}")); + } + + @Test + void detection_error_bad_gateway_successResponse() throws Exception { + final String errorResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_bad_gateway.json")) + .readAllBytes()); + final String errorAPIResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_bad_gateway.html")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withStatus(502) + .withBody(errorAPIResponse))); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + biasInputLiterals.setLanguage(null); + ObjectMapper mapper = new ObjectMapper(); + String detectionParameterJson = mapper.writeValueAsString(biasInputLiterals); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content(detectionParameterJson)) + .andExpect(status().is(500)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().json(errorResponse)); + } + + @Test + void detection_error_missing_body_successResponse() throws Exception { + String errorResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_missing_body.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIAS_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withStatus(422) + 
)); + + mockMvc.perform(MockMvcRequestBuilders.post(RestEndpoints.DEBIAS_DETECTION) + .contentType(MediaType.APPLICATION_JSON) + .characterEncoding("utf-8") + .content("{}")) + .andExpect(status().is(400)) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andExpect(content().json(errorResponse)); + } +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.html b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.html new file mode 100644 index 0000000000..7735b6164e --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.html @@ -0,0 +1,7 @@ + +502 Bad Gateway + +

502 Bad Gateway

+
nginx/1.18.0 (Ubuntu)
+ + diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.json new file mode 100644 index 0000000000..6198e9494a --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_bad_gateway.json @@ -0,0 +1,3 @@ +{ + "errorMessage": "I/O error on POST request for \"http://debias.host\": Unexpected character ('<' (code 60)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n at [Source: (String)\"\n502 Bad Gateway\n\n

502 Bad Gateway

\n
nginx/1.18.0 (Ubuntu)
\n\n\n\"; line: 1, column: 2]" +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_body.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_body.json new file mode 100644 index 0000000000..0f66c40622 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_body.json @@ -0,0 +1 @@ +{"errorMessage":"422 Unprocessable Entity"} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_language.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_language.json new file mode 100644 index 0000000000..34b3057ec0 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_language.json @@ -0,0 +1,20 @@ +{ + "detail": [ + { + "type": "missing", + "loc": [ + "body", + "language" + ], + "msg": "Field required", + "input": { + "values": [ + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch" + ] + }, + "url": "https://errors.pydantic.dev/2.5/v/missing" + } + ] +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_values.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_values.json new file mode 100644 index 0000000000..4c57cbfe9e --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_missing_values.json @@ -0,0 +1,16 @@ +{ + "detail": [ + { + "type": "missing", + "loc": [ + "body", + "values" + ], + "msg": "Field required", + "input": { + "language": "en" + }, + "url": "https://errors.pydantic.dev/2.5/v/missing" + } + ] +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_body.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_body.json new file mode 100644 index 0000000000..5545ccf71b --- /dev/null +++ 
b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_body.json @@ -0,0 +1,24 @@ +{ + "detail": [ + { + "type": "missing", + "loc": [ + "body", + "language" + ], + "msg": "Field required", + "input": {}, + "url": "https://errors.pydantic.dev/2.5/v/missing" + }, + { + "type": "missing", + "loc": [ + "body", + "values" + ], + "msg": "Field required", + "input": {}, + "url": "https://errors.pydantic.dev/2.5/v/missing" + } + ] +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_language.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_language.json new file mode 100644 index 0000000000..4acfd3c682 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_language.json @@ -0,0 +1,14 @@ +{ + "detail": [ + { + "type": "string_type", + "loc": [ + "body", + "language" + ], + "msg": "Input should be a valid string", + "input": null, + "url": "https://errors.pydantic.dev/2.5/v/string_type" + } + ] +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_values.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_values.json new file mode 100644 index 0000000000..0956950056 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_error_null_values.json @@ -0,0 +1,14 @@ +{ + "detail": [ + { + "type": "list_type", + "loc": [ + "body", + "values" + ], + "msg": "Input should be a valid list", + "input": null, + "url": "https://errors.pydantic.dev/2.5/v/list_type" + } + ] +} diff --git a/metis-debias/metis-debias-detect-rest/src/test/resources/sample_success_response.json b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_success_response.json new file mode 100644 index 0000000000..d1933c35d7 --- /dev/null +++ b/metis-debias/metis-debias-detect-rest/src/test/resources/sample_success_response.json @@ -0,0 +1,51 @@ +{ + "metadata": { + "annotator": 
"de-bias", + "thesaurus": null, + "date": "2024-08-27T12:04:17" + }, + "results": [ + { + "language": "en", + "literal": "sample title of aboriginal and addict", + "tags": [ + { + "uri": "http://www.example.org/debias#t_2_en", + "start": 16, + "end": 26, + "length": 10 + }, + { + "uri": "http://www.example.org/debias#t_3_en", + "start": 31, + "end": 37, + "length": 6 + } + ] + }, + { + "language": "en", + "literal": "a second addict sample title", + "tags": [ + { + "uri": "http://www.example.org/debias#t_3_en", + "start": 9, + "end": 15, + "length": 6 + } + ] + }, + { + "language": "en", + "literal": "this is a demo of master and slave branch", + "tags": [ + { + "uri": "http://www.example.org/debias#t_198_en", + "start": 29, + "end": 34, + "length": 5 + } + ] + } + ] +} diff --git a/metis-debias/metis-debias-detect-service/.gitignore b/metis-debias/metis-debias-detect-service/.gitignore new file mode 100644 index 0000000000..549e00a2a9 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/.gitignore @@ -0,0 +1,33 @@ +HELP.md +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ diff --git a/metis-debias/metis-debias-detect-service/pom.xml b/metis-debias/metis-debias-detect-service/pom.xml new file mode 100644 index 0000000000..627515f180 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/pom.xml @@ -0,0 +1,57 @@ + + + 4.0.0 + + metis-debias + eu.europeana.metis + 13 + + metis-debias-detect-service + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-test + test + + + jakarta.xml.bind + jakarta.xml.bind-api + + + org.junit.jupiter + 
junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + + + org.wiremock + wiremock-standalone + ${version.org.wiremock} + test + + + org.apache.httpcomponents.client5 + httpclient5 + ${version.apache.httpclient} + + + + + + org.springframework + spring-framework-bom + ${version.spring} + pom + import + + + + + diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/client/DeBiasClient.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/client/DeBiasClient.java new file mode 100644 index 0000000000..14e366ffdc --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/client/DeBiasClient.java @@ -0,0 +1,120 @@ +package eu.europeana.metis.debias.detect.client; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import eu.europeana.metis.debias.detect.exceptions.DeBiasBadRequestException; +import eu.europeana.metis.debias.detect.exceptions.DeBiasInternalServerException; +import eu.europeana.metis.debias.detect.model.DeBiasResult; +import eu.europeana.metis.debias.detect.model.error.ErrorDeBiasResult; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; +import eu.europeana.metis.debias.detect.model.response.DetectionDeBiasResult; +import eu.europeana.metis.debias.detect.service.BiasDetectService; +import java.lang.invoke.MethodHandles; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import 
org.springframework.http.converter.StringHttpMessageConverter; +import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; +import org.springframework.web.client.RestClient; + +/** + * The type DeBias client. + */ +public class DeBiasClient implements BiasDetectService { + + private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final String apiURL; + private final Integer connectTimeOut; + private final Integer requestTimeout; + + /** + * Instantiates a new DeBias client. + * + * @param apiURL the api url + * @param connectTimeOut the connect time out + * @param requestTimeout the request time out + */ + public DeBiasClient(String apiURL, int connectTimeOut, int requestTimeout) { + this.apiURL = Objects.requireNonNull(apiURL, "api URL is required"); + this.connectTimeOut = connectTimeOut; + this.requestTimeout = requestTimeout; + } + + /** + * Method to detect biased terms according to the input values provided + * + * @param biasInputLiterals {@link BiasInputLiterals} language and values + * @return {@link DeBiasResult } containing metadata and values of the detection or error + */ + @Override + public DeBiasResult detect(BiasInputLiterals biasInputLiterals) { + URI uri; + try { + uri = new URI(this.apiURL); + } catch (URISyntaxException | RuntimeException e) { + LOGGER.error("Error with API URL", e); + throw new IllegalArgumentException("Not valid API url"); + } + + final HttpComponentsClientHttpRequestFactory clientHttpRequestFactory = new HttpComponentsClientHttpRequestFactory(); + clientHttpRequestFactory.setConnectTimeout(this.connectTimeOut); + clientHttpRequestFactory.setConnectionRequestTimeout(this.requestTimeout); + final RestClient restClient = RestClient.builder() + .messageConverters(httpMessageConverters -> { + httpMessageConverters.add(new StringHttpMessageConverter()); + httpMessageConverters.add(new MappingJackson2HttpMessageConverter()); + }) + .build(); + + final 
ResponseEntity response = restClient + .post() + .uri(uri) + .accept(MediaType.APPLICATION_JSON) + .contentType(MediaType.APPLICATION_JSON) + .body(biasInputLiterals) + .exchange(((clientRequest, clientResponse) -> { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.ALWAYS); + mapper.registerModule(new JavaTimeModule()); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + if (clientResponse.getStatusCode().is2xxSuccessful()) { + DetectionDeBiasResult result = mapper.readValue(clientResponse.getBody(), DetectionDeBiasResult.class); + return new ResponseEntity<>(result, clientResponse.getStatusCode()); + } else { + String errorResponse = new String(clientResponse.getBody().readAllBytes(), StandardCharsets.UTF_8); + if (!errorResponse.isBlank()) { + ErrorDeBiasResult result = mapper.readValue(errorResponse, ErrorDeBiasResult.class); + if (result.getDetailList() !=null) { + throw new DeBiasBadRequestException(clientResponse.getStatusCode() + " " + + result.getDetailList().getFirst().getType() + " " + + result.getDetailList().getFirst().getMsg()); + } else { + throw new DeBiasBadRequestException(errorResponse); + } + } else if (clientResponse.getStatusCode().is5xxServerError()) { + throw new DeBiasInternalServerException(clientResponse.getStatusCode().value()+" "+clientResponse.getStatusText()); + } else if (clientResponse.getStatusCode().is4xxClientError()) { + throw new DeBiasBadRequestException(clientResponse.getStatusCode().value()+" "+clientResponse.getStatusText()); + } + } + return new ResponseEntity<>(clientResponse.getStatusCode()); + }) + ); + + if (response.getStatusCode().is2xxSuccessful()) { + LOGGER.info("Detection processed successfully!"); + } else { + LOGGER.warn("Failed to process request. 
Response code: {}", response.getStatusCode().value()); + } + return response.getBody(); + } + +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasBadRequestException.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasBadRequestException.java new file mode 100644 index 0000000000..5113498aa9 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasBadRequestException.java @@ -0,0 +1,25 @@ +package eu.europeana.metis.debias.detect.exceptions; + +import java.io.Serial; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.ResponseStatus; + +/** + * The type DeBias bad request exception. + */ +@ResponseStatus(value = HttpStatus.BAD_REQUEST, reason = "DeBias detection bad request") +public class DeBiasBadRequestException extends RuntimeException { + + @Serial + private static final long serialVersionUID = -5859207750420173804L; + + /** + * Instantiates a new Debias bad request exception. + * + * @param message the message + */ + public DeBiasBadRequestException(String message) { + super(message); + } + +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasInternalServerException.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasInternalServerException.java new file mode 100644 index 0000000000..e482745030 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/exceptions/DeBiasInternalServerException.java @@ -0,0 +1,34 @@ +package eu.europeana.metis.debias.detect.exceptions; + +import java.io.Serial; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.ResponseStatus; + +/** + * The type DeBias exception. 
+ */ +@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR, reason = "DeBias detection internal server error") +public class DeBiasInternalServerException extends RuntimeException { + + @Serial + private static final long serialVersionUID = -5671884493038169899L; + + /** + * Constructs a new exception with the specified detail message. + * + * @param message the detail message. The detail message is saved for later retrieval by the {@link #getMessage()} method. + */ + public DeBiasInternalServerException(String message) { + super(message); + } + + /** + * Instantiates a new DeBias exception. + * + * @param message the message + * @param cause the cause + */ + public DeBiasInternalServerException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/DeBiasResult.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/DeBiasResult.java new file mode 100644 index 0000000000..0ba9c62a2a --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/DeBiasResult.java @@ -0,0 +1,8 @@ +package eu.europeana.metis.debias.detect.model; + +/** + * DeBias result interface + */ +public interface DeBiasResult { + // common interface for De Bias API responses +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Detail.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Detail.java new file mode 100644 index 0000000000..b22eb19f16 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Detail.java @@ -0,0 +1,108 @@ +package eu.europeana.metis.debias.detect.model.error; + +import java.util.Collections; +import java.util.List; + +/** + * The type Detail. 
+ */ +public class Detail { + + private String type; + private List loc; + private String msg; + private Input input; + private String url; + + /** + * Gets type. + * + * @return the type + */ + public String getType() { + return type; + } + + /** + * Sets type. + * + * @param type the type + */ + public void setType(String type) { + this.type = type; + } + + /** + * Gets loc. + * + * @return the loc + */ + public List getLoc() { + return loc; + } + + /** + * Sets loc. + * + * @param loc the loc + */ + public void setLoc(List loc) { + if (loc != null) { + this.loc = Collections.unmodifiableList(loc); + } + } + + /** + * Gets msg. + * + * @return the msg + */ + public String getMsg() { + return msg; + } + + /** + * Sets msg. + * + * @param msg the msg + */ + public void setMsg(String msg) { + this.msg = msg; + } + + /** + * Gets input. + * + * @return the input + */ + public Input getInput() { + return input; + } + + /** + * Sets input. + * + * @param input the input + */ + public void setInput(Input input) { + this.input = input; + } + + /** + * Gets url. + * + * @return the url + */ + public String getUrl() { + return url; + } + + /** + * Sets url. + * + * @param url the url + */ + public void setUrl(String url) { + this.url = url; + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/ErrorDeBiasResult.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/ErrorDeBiasResult.java new file mode 100644 index 0000000000..c725362018 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/ErrorDeBiasResult.java @@ -0,0 +1,35 @@ +package eu.europeana.metis.debias.detect.model.error; + +import com.fasterxml.jackson.annotation.JsonProperty; +import eu.europeana.metis.debias.detect.model.DeBiasResult; +import java.util.Collections; +import java.util.List; + +/** + * The type Error result. 
+ */ +public class ErrorDeBiasResult implements DeBiasResult { + + @JsonProperty("detail") + private List detailList; + + /** + * Gets detail list. + * + * @return the detail list + */ + public List getDetailList() { + return detailList; + } + + /** + * Sets detail list. + * + * @param detailList the detail list + */ + public void setDetailList(List detailList) { + if (detailList != null) { + this.detailList = Collections.unmodifiableList(detailList); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Input.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Input.java new file mode 100644 index 0000000000..c7db96b545 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/error/Input.java @@ -0,0 +1,35 @@ +package eu.europeana.metis.debias.detect.model.error; + +import java.util.Collections; +import java.util.List; + +/** + * The type Input. + */ +public class Input { + + /** + * The Values. + */ + List values; + + /** + * Gets values. + * + * @return the values + */ + public List getValues() { + return values; + } + + /** + * Sets values. 
+ * + * @param values the values + */ + public void setValues(List values) { + if (values != null) { + this.values = Collections.unmodifiableList(values); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/request/BiasInputLiterals.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/request/BiasInputLiterals.java new file mode 100644 index 0000000000..3ebd31210d --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/request/BiasInputLiterals.java @@ -0,0 +1,52 @@ +package eu.europeana.metis.debias.detect.model.request; + +import java.util.Collections; +import java.util.List; + + +/** + * The type Bias input literals. + */ +public class BiasInputLiterals { + + private String language; + private List values; + + /** + * Gets language. + * + * @return the language 2-letter code ISO 6391 + */ + public String getLanguage() { + return language; + } + + /** + * Sets language. + * + * @param language the language 2-letter code ISO 6391 + */ + public void setLanguage(String language) { + this.language = language; + } + + /** + * Gets values. + * + * @return the values + */ + public List getValues() { + return values; + } + + /** + * Sets values. 
+ * + * @param values the values + */ + public void setValues(List values) { + if (values != null) { + this.values = Collections.unmodifiableList(values); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/DetectionDeBiasResult.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/DetectionDeBiasResult.java new file mode 100644 index 0000000000..ad15d04167 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/DetectionDeBiasResult.java @@ -0,0 +1,54 @@ +package eu.europeana.metis.debias.detect.model.response; + +import com.fasterxml.jackson.annotation.JsonProperty; +import eu.europeana.metis.debias.detect.model.DeBiasResult; +import java.util.Collections; +import java.util.List; + +/** + * The type Detection result. + */ +public class DetectionDeBiasResult implements DeBiasResult { + + private Metadata metadata; + @JsonProperty("results") + private List detections; + + /** + * Gets metadata. + * + * @return the metadata + */ + public Metadata getMetadata() { + return metadata; + } + + /** + * Sets metadata. + * + * @param metadata the metadata + */ + public void setMetadata(Metadata metadata) { + this.metadata = metadata; + } + + /** + * Gets detections. + * + * @return the detections + */ + public List getDetections() { + return detections; + } + + /** + * Sets detections. 
+ * + * @param detections the detections + */ + public void setDetections(List detections) { + if (detections != null) { + this.detections = Collections.unmodifiableList(detections); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Metadata.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Metadata.java new file mode 100644 index 0000000000..e690e874cc --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Metadata.java @@ -0,0 +1,75 @@ +package eu.europeana.metis.debias.detect.model.response; + +import com.fasterxml.jackson.annotation.JsonFormat; +import java.util.Date; + +/** + * The type Metadata. + */ +public class Metadata { + + private String annotator; + private String thesaurus; + @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss") + private Date date; + + /** + * Gets annotator. + * + * @return the annotator + */ + public String getAnnotator() { + return annotator; + } + + /** + * Sets annotator. + * + * @param annotator the annotator + */ + public void setAnnotator(String annotator) { + this.annotator = annotator; + } + + /** + * Gets thesaurus version. + * + * @return the thesaurus version + */ + public String getThesaurus() { + return thesaurus; + } + + /** + * Sets thesaurus version. + * + * @param thesaurus the thesaurus version + */ + public void setThesaurus(String thesaurus) { + this.thesaurus = thesaurus; + } + + /** + * Gets date. + * + * @return the date + */ + public Date getDate() { + if (date != null) { + return new Date(date.getTime()); + } else { + return null; + } + } + + /** + * Sets date. 
+ * + * @param date the date + */ + public void setDate(Date date) { + if (date != null) { + this.date = new Date(date.getTime()); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Tag.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Tag.java new file mode 100644 index 0000000000..62ff8c1124 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/Tag.java @@ -0,0 +1,84 @@ +package eu.europeana.metis.debias.detect.model.response; + +/** + * The type Tag. + */ +public class Tag { + private int start; + private int end; + private int length; + private String uri; + + /** + * Gets start. + * + * @return the start + */ + public int getStart() { + return start; + } + + /** + * Sets start. + * + * @param start the start + */ + public void setStart(int start) { + this.start = start; + } + + /** + * Gets end. + * + * @return the end + */ + public int getEnd() { + return end; + } + + /** + * Sets end. + * + * @param end the end + */ + public void setEnd(int end) { + this.end = end; + } + + /** + * Gets length. + * + * @return the length + */ + public int getLength() { + return length; + } + + /** + * Sets length. + * + * @param length the length + */ + public void setLength(int length) { + this.length = length; + } + + /** + * Gets vocabulary. + * + * @return the vocabulary + */ + public String getUri() { + return uri; + } + + /** + * Sets vocabulary. 
+ * + * @param uri the vocabulary + */ + public void setUri(String uri) { + this.uri = uri; + } + +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/ValueDetection.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/ValueDetection.java new file mode 100644 index 0000000000..63cd02e093 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/model/response/ValueDetection.java @@ -0,0 +1,70 @@ +package eu.europeana.metis.debias.detect.model.response; + +import java.util.Collections; +import java.util.List; + +/** + * The type Value detection. + */ +public class ValueDetection { + + private String language; + private String literal; + private List tags; + + /** + * Gets language. + * + * @return the language 2-letter code ISO 6391 + */ + public String getLanguage() { + return language; + } + + /** + * Sets language. + * + * @param language the language 2-letter code ISO 6391 + */ + public void setLanguage(String language) { + this.language = language; + } + + /** + * Gets literal. + * + * @return the literal + */ + public String getLiteral() { + return literal; + } + + /** + * Sets literal. + * + * @param literal the literal + */ + public void setLiteral(String literal) { + this.literal = literal; + } + + /** + * Gets tags. + * + * @return the tags + */ + public List getTags() { + return tags; + } + + /** + * Sets tags. 
+ * + * @param tags the tags + */ + public void setTags(List tags) { + if (tags != null) { + this.tags = Collections.unmodifiableList(tags); + } + } +} diff --git a/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/service/BiasDetectService.java b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/service/BiasDetectService.java new file mode 100644 index 0000000000..56e5ca2114 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/main/java/eu/europeana/metis/debias/detect/service/BiasDetectService.java @@ -0,0 +1,19 @@ +package eu.europeana.metis.debias.detect.service; + +import eu.europeana.metis.debias.detect.model.DeBiasResult; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; + +/** + * Implementations of this interface are able to detect biased terms given the languages + * and the terms to search, and it returns a report indicating the result of the terms. + */ +public interface BiasDetectService { + + /** + * Method to detect biased terms according to the input values provided + * + * @param biasInputLiterals language and values + * @return DeBiasResult containing metadata and values of the detection + */ + DeBiasResult detect(BiasInputLiterals biasInputLiterals); +} diff --git a/metis-debias/metis-debias-detect-service/src/test/java/eu/europeana/metis/debias/detect/client/DeBiasClientTest.java b/metis-debias/metis-debias-detect-service/src/test/java/eu/europeana/metis/debias/detect/client/DeBiasClientTest.java new file mode 100644 index 0000000000..572d9c6e85 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/test/java/eu/europeana/metis/debias/detect/client/DeBiasClientTest.java @@ -0,0 +1,131 @@ +package eu.europeana.metis.debias.detect.client; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static 
com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.junit.jupiter.api.Assertions.*; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.common.ConsoleNotifier; +import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer; +import eu.europeana.metis.debias.detect.exceptions.DeBiasBadRequestException; +import eu.europeana.metis.debias.detect.model.request.BiasInputLiterals; +import eu.europeana.metis.debias.detect.model.response.DetectionDeBiasResult; +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.http.MediaType; +import org.springframework.web.client.ResourceAccessException; + +class DeBiasClientTest { + + public static final String DEBIASCLIENT_HOST = "debiasclient.host"; + private static WireMockServer wireMockServer; + private final DeBiasClient debiasClient = new DeBiasClient("http://" + DEBIASCLIENT_HOST, 300, 300); + + private static void assertMetadata(DetectionDeBiasResult detectionResult) { + assertEquals("de-bias", detectionResult.getMetadata().getAnnotator()); + assertNull(detectionResult.getMetadata().getThesaurus()); + assertNotNull(detectionResult.getMetadata().getDate()); + } + + private static void assertDetections(DetectionDeBiasResult detectionResult) { + assertEquals(3, detectionResult.getDetections().size()); + assertNotNull(detectionResult.getDetections().getFirst()); + assertEquals("en", detectionResult.getDetections().getFirst().getLanguage()); + assertEquals("sample title of aboriginal and addict", detectionResult.getDetections().getFirst().getLiteral()); + } + + private static void assertFirstTag(DetectionDeBiasResult detectionResult) { + assertNotNull(detectionResult.getDetections().getFirst().getTags()); + 
assertNotNull(detectionResult.getDetections().getFirst().getTags().getFirst()); + assertEquals(16, detectionResult.getDetections().getFirst().getTags().getFirst().getStart()); + assertEquals(26, detectionResult.getDetections().getFirst().getTags().getFirst().getEnd()); + assertEquals(10, detectionResult.getDetections().getFirst().getTags().getFirst().getLength()); + assertEquals("http://www.example.org/debias#t_2_en", + detectionResult.getDetections().getFirst().getTags().getFirst().getUri()); + } + + @BeforeAll + static void createWireMock() { + wireMockServer = new WireMockServer(wireMockConfig() + .dynamicPort() + .enableBrowserProxying(true) + .notifier(new ConsoleNotifier(true))); + wireMockServer.start(); + JvmProxyConfigurer.configureFor(wireMockServer); + } + + @AfterAll + static void tearDownWireMock() { + wireMockServer.stop(); + } + + @Test + void detect_successResponse() throws IOException { + final String successResponse = new String( + Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_success_response.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIASCLIENT_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withBody(successResponse) + .withStatus(200))); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setLanguage("en"); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + + DetectionDeBiasResult detectionResult = (DetectionDeBiasResult) debiasClient.detect(biasInputLiterals); + + assertNotNull(detectionResult); + assertMetadata(detectionResult); + assertDetections(detectionResult); + assertFirstTag(detectionResult); + } + + @Test + void detect_errorResponse() throws IOException { + final String errorResponse = new String( + 
Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("sample_error_null_language.json")) + .readAllBytes()); + wireMockServer.stubFor(post("/") + .withHost(equalTo(DEBIASCLIENT_HOST)) + .willReturn(aResponse() + .withHeader("Content-Type", MediaType.APPLICATION_JSON_VALUE) + .withBody(errorResponse) + .withStatus(422))); + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setLanguage(null); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + + DeBiasBadRequestException deBiasBadRequestException = assertThrows(DeBiasBadRequestException.class, + () -> debiasClient.detect(biasInputLiterals)); + + assertNotNull(deBiasBadRequestException); + assertEquals("422 UNPROCESSABLE_ENTITY string_type Input should be a valid string", deBiasBadRequestException.getMessage()); + } + + @Test + void detect_noService_errorResponse() { + + BiasInputLiterals biasInputLiterals = new BiasInputLiterals(); + biasInputLiterals.setLanguage("en"); + biasInputLiterals.setValues(List.of( + "sample title of aboriginal and addict", + "a second addict sample title", + "this is a demo of master and slave branch")); + + assertThrows(ResourceAccessException.class, () -> debiasClient.detect(biasInputLiterals)); + } +} diff --git a/metis-debias/metis-debias-detect-service/src/test/resources/sample_error_null_language.json b/metis-debias/metis-debias-detect-service/src/test/resources/sample_error_null_language.json new file mode 100644 index 0000000000..4acfd3c682 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/test/resources/sample_error_null_language.json @@ -0,0 +1,14 @@ +{ + "detail": [ + { + "type": "string_type", + "loc": [ + "body", + "language" + ], + "msg": "Input should be a valid string", + "input": null, + "url": "https://errors.pydantic.dev/2.5/v/string_type" + } + ] +} diff --git 
a/metis-debias/metis-debias-detect-service/src/test/resources/sample_success_response.json b/metis-debias/metis-debias-detect-service/src/test/resources/sample_success_response.json new file mode 100644 index 0000000000..d1933c35d7 --- /dev/null +++ b/metis-debias/metis-debias-detect-service/src/test/resources/sample_success_response.json @@ -0,0 +1,51 @@ +{ + "metadata": { + "annotator": "de-bias", + "thesaurus": null, + "date": "2024-08-27T12:04:17" + }, + "results": [ + { + "language": "en", + "literal": "sample title of aboriginal and addict", + "tags": [ + { + "uri": "http://www.example.org/debias#t_2_en", + "start": 16, + "end": 26, + "length": 10 + }, + { + "uri": "http://www.example.org/debias#t_3_en", + "start": 31, + "end": 37, + "length": 6 + } + ] + }, + { + "language": "en", + "literal": "a second addict sample title", + "tags": [ + { + "uri": "http://www.example.org/debias#t_3_en", + "start": 9, + "end": 15, + "length": 6 + } + ] + }, + { + "language": "en", + "literal": "this is a demo of master and slave branch", + "tags": [ + { + "uri": "http://www.example.org/debias#t_198_en", + "start": 29, + "end": 34, + "length": 5 + } + ] + } + ] +} diff --git a/metis-debias/pom.xml b/metis-debias/pom.xml new file mode 100644 index 0000000000..1ab9ee269a --- /dev/null +++ b/metis-debias/pom.xml @@ -0,0 +1,17 @@ + + 4.0.0 + + eu.europeana.metis + 13 + metis-framework + + + metis-debias + pom + + + metis-debias-detect-rest + metis-debias-detect-service + + diff --git a/metis-dereference/metis-dereference-common/pom.xml b/metis-dereference/metis-dereference-common/pom.xml index ba7ec802f7..83e879440c 100644 --- a/metis-dereference/metis-dereference-common/pom.xml +++ b/metis-dereference/metis-dereference-common/pom.xml @@ -4,7 +4,7 @@ metis-dereference eu.europeana.metis - 12.2 + 13 metis-dereference-common diff --git a/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformer.java 
b/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformer.java index 8d2677b9a9..9f3d8b06ee 100644 --- a/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformer.java +++ b/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformer.java @@ -1,6 +1,6 @@ package eu.europeana.metis.dereference; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; import eu.europeana.metis.exception.BadContentException; import java.io.ByteArrayInputStream; @@ -105,8 +105,7 @@ private Optional getValidatedXml(String resourceId, String xml) throws B if (isEmptyXml(xml)) { xmlResponse = Optional.empty(); if (LOGGER.isInfoEnabled()) { - LOGGER.info("Transformed entity {} results to an empty XML.", - CRLF_PATTERN.matcher(resourceId).replaceAll("")); + LOGGER.info("Transformed entity {} results to an empty XML.", sanitizeCRLF(resourceId)); } } else { try { diff --git a/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/ProcessedEntity.java b/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/ProcessedEntity.java index 47106bd2d5..e6e1462f08 100644 --- a/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/ProcessedEntity.java +++ b/metis-dereference/metis-dereference-common/src/main/java/eu/europeana/metis/dereference/ProcessedEntity.java @@ -7,6 +7,7 @@ import dev.morphia.annotations.Index; import dev.morphia.annotations.IndexOptions; import dev.morphia.annotations.Indexes; +import eu.europeana.enrichment.api.external.DereferenceResultStatus; import eu.europeana.metis.mongo.utils.ObjectIdSerializer; import jakarta.xml.bind.annotation.XmlElement; import jakarta.xml.bind.annotation.XmlRootElement; @@ -27,12 
+28,12 @@ public class ProcessedEntity { private ObjectId id; /** - * The resourceId (URI) of the resource + * The resourceId (URI) of the resource. **/ private String resourceId; /** - * A xml representation of the mapped resource in one of the contextual resources + * A xml representation of the contextual resource (transformed from the original entity). **/ private String xml; @@ -41,6 +42,11 @@ public class ProcessedEntity { **/ private String vocabularyId; + /** + * The status of the dereference operation. + */ + private DereferenceResultStatus resultStatus; + @XmlElement public ObjectId getId() { return id; @@ -76,4 +82,13 @@ public String getVocabularyId() { public void setVocabularyId(String vocabularyId) { this.vocabularyId = vocabularyId; } + + @XmlElement + public DereferenceResultStatus getResultStatus() { + return resultStatus; + } + + public void setResultStatus(DereferenceResultStatus resultStatus) { + this.resultStatus = resultStatus; + } } diff --git a/metis-dereference/metis-dereference-common/src/test/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformerTest.java b/metis-dereference/metis-dereference-common/src/test/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformerTest.java index 0438de2929..fde13dabaa 100644 --- a/metis-dereference/metis-dereference-common/src/test/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformerTest.java +++ b/metis-dereference/metis-dereference-common/src/test/java/eu/europeana/metis/dereference/IncomingRecordToEdmTransformerTest.java @@ -16,11 +16,11 @@ class IncomingRecordToEdmTransformerTest { - private static final String copyXmlXsltFileName = "copy_xml.xslt"; - private static final String produceEmptyXsltFileName = "produce_empty.xslt"; - private static final String produceInvalidXmlXsltFileName = "produce_invalid_xml.xslt"; - private static final String ysoP105069FileName = "yso_p105069.xml"; - private static final String invalidXmlFileName = "invalid_xml.xml"; + private 
static final String COPY_XML_XSLT_FILE_NAME = "copy_xml.xslt"; + private static final String PRODUCE_EMPTY_XSLT_FILE_NAME = "produce_empty.xslt"; + private static final String PRODUCE_INVALID_XML_XSLT_FILE_NAME = "produce_invalid_xml.xslt"; + private static final String YSO_P_105069_FILE_NAME = "yso_p105069.xml"; + private static final String INVALID_XML_FILE_NAME = "invalid_xml.xml"; private static String copyXmlXsltString; private static String produceEmptyXsltString; @@ -31,19 +31,19 @@ class IncomingRecordToEdmTransformerTest { @BeforeAll static void setUp() throws Exception { ClassLoader classLoader = IncomingRecordToEdmTransformerTest.class.getClassLoader(); - Path path = Paths.get(Objects.requireNonNull(classLoader.getResource(copyXmlXsltFileName)).toURI()); + Path path = Paths.get(Objects.requireNonNull(classLoader.getResource(COPY_XML_XSLT_FILE_NAME)).toURI()); copyXmlXsltString = Files.readString(path, StandardCharsets.UTF_8); - path = Paths.get(Objects.requireNonNull(classLoader.getResource(produceEmptyXsltFileName)).toURI()); + path = Paths.get(Objects.requireNonNull(classLoader.getResource(PRODUCE_EMPTY_XSLT_FILE_NAME)).toURI()); produceEmptyXsltString = Files.readString(path, StandardCharsets.UTF_8); - path = Paths.get(Objects.requireNonNull(classLoader.getResource(produceInvalidXmlXsltFileName)).toURI()); + path = Paths.get(Objects.requireNonNull(classLoader.getResource(PRODUCE_INVALID_XML_XSLT_FILE_NAME)).toURI()); produceInvalidXmlXsltString = Files.readString(path, StandardCharsets.UTF_8); - path = Paths.get(Objects.requireNonNull(classLoader.getResource(ysoP105069FileName)).toURI()); + path = Paths.get(Objects.requireNonNull(classLoader.getResource(YSO_P_105069_FILE_NAME)).toURI()); ysoP105069String = Files.readString(path, StandardCharsets.UTF_8); - path = Paths.get(Objects.requireNonNull(classLoader.getResource(invalidXmlFileName)).toURI()); + path = Paths.get(Objects.requireNonNull(classLoader.getResource(INVALID_XML_FILE_NAME)).toURI()); 
invalidXmlString = Files.readString(path, StandardCharsets.UTF_8); } diff --git a/metis-dereference/metis-dereference-import/pom.xml b/metis-dereference/metis-dereference-import/pom.xml index 1f19859337..36ed4e9f46 100644 --- a/metis-dereference/metis-dereference-import/pom.xml +++ b/metis-dereference/metis-dereference-import/pom.xml @@ -4,7 +4,7 @@ metis-dereference eu.europeana.metis - 12.2 + 13 4.0.0 metis-dereference-import diff --git a/metis-dereference/metis-dereference-rest/pom.xml b/metis-dereference/metis-dereference-rest/pom.xml index c8cdce46c7..f7f882e9d8 100644 --- a/metis-dereference/metis-dereference-rest/pom.xml +++ b/metis-dereference/metis-dereference-rest/pom.xml @@ -4,7 +4,7 @@ metis-dereference eu.europeana.metis - 12.2 + 13 metis-dereference-rest diff --git a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/controller/DereferencingController.java b/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/controller/DereferencingController.java index 7929cf3c08..088dd4e5a0 100644 --- a/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/controller/DereferencingController.java +++ b/metis-dereference/metis-dereference-rest/src/main/java/eu/europeana/metis/dereference/rest/controller/DereferencingController.java @@ -11,7 +11,6 @@ import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.responses.ApiResponse; import java.util.List; -import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.GetMapping; @@ -81,8 +80,7 @@ public EnrichmentResultList dereference(@RequestBody List resourceIds) { return new EnrichmentResultList(resourceIds.stream() .map(this::dereferenceInternal) .map(item -> new EnrichmentResultBaseWrapper(item.getEnrichmentBasesAsList(), - 
item.getDereferenceStatus())) - .collect(Collectors.toList())); + item.getDereferenceStatus())).toList()); } catch (RuntimeException e) { throw new DereferenceException(generateExceptionMessage(String.join(",", resourceIds), e), e); } diff --git a/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingControllerTest.java b/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingControllerTest.java index 5d78291629..ed50b54895 100644 --- a/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingControllerTest.java +++ b/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingControllerTest.java @@ -1,5 +1,6 @@ package eu.europeana.metis.dereference.rest; +import static java.util.Map.entry; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; @@ -17,7 +18,6 @@ import eu.europeana.metis.utils.RestEndpoints; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.jupiter.api.BeforeEach; @@ -46,6 +46,24 @@ void setUp() { .setControllerAdvice(new RestResponseExceptionHandler()).build(); } + private Map getNamespaceMap() { + return Map.ofEntries( + entry("metis", "http://www.europeana.eu/schemas/metis"), + entry("edm", "http://www.europeana.eu/schemas/edm/"), + entry("skos", "http://www.w3.org/2004/02/skos/core#"), + entry("dcterms", "http://purl.org/dc/terms/"), + entry("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"), + entry("rdfs", "http://www.w3.org/2000/01/rdf-schema"), + entry("cc", "http://creativecommons.org/ns"), + entry("foaf", "http://xmlns.com/foaf/0.1/"), + entry("wgs84_pos", "http://www.w3.org/2003/01/geo/wgs84_pos#"), + entry("owl", 
"http://www.w3.org/2002/07/owl#"), + entry("xml", "http://www.w3.org/XML/1998/namespace"), + entry("dc", "http://purl.org/dc/elements/1.1/"), + entry("rdaGr2", "http://rdvocab.info/ElementsGr2/") + ); + } + @Test void dereferenceGet_outputXML_expectSuccess() throws Exception { when(dereferenceServiceMock.dereference("http://www.example.com")).thenReturn( @@ -130,22 +148,4 @@ private Agent getAgent(String uri) { return agent; } - - private Map getNamespaceMap() { - Map namespaceMap = new HashMap<>(); - namespaceMap.put("metis", "http://www.europeana.eu/schemas/metis"); - namespaceMap.put("edm", "http://www.europeana.eu/schemas/edm/"); - namespaceMap.put("skos", "http://www.w3.org/2004/02/skos/core#"); - namespaceMap.put("dcterms", "http://purl.org/dc/terms/"); - namespaceMap.put("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); - namespaceMap.put("rdfs", "http://www.w3.org/2000/01/rdf-schema"); - namespaceMap.put("cc", "http://creativecommons.org/ns"); - namespaceMap.put("foaf", "http://xmlns.com/foaf/0.1/"); - namespaceMap.put("wgs84_pos", "http://www.w3.org/2003/01/geo/wgs84_pos#"); - namespaceMap.put("owl", "http://www.w3.org/2002/07/owl#"); - namespaceMap.put("xml", "http://www.w3.org/XML/1998/namespace"); - namespaceMap.put("dc", "http://purl.org/dc/elements/1.1/"); - namespaceMap.put("rdaGr2", "http://rdvocab.info/ElementsGr2/"); - return namespaceMap; - } } diff --git a/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingManagementControllerTest.java b/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingManagementControllerTest.java index c53c23ede8..76d9f2478b 100644 --- a/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingManagementControllerTest.java +++ b/metis-dereference/metis-dereference-rest/src/test/java/eu/europeana/metis/dereference/rest/DereferencingManagementControllerTest.java @@ 
-124,19 +124,6 @@ void testEmptyCacheByEmptyXml() throws Exception { assertEquals("OK", testEmptyCacheResult); } - @Test - void testEmptyNullOrEmptyXML() throws Exception { - doAnswer((Answer) invocationOnMock -> { - testEmptyCacheResult = "OK"; - return null; - }).when(deRefManagementServiceMock).purgeByNullOrEmptyXml(); - - deRefManagementControllerMock.perform(delete(RestEndpoints.CACHE_EMPTY_XML)).andExpect(status().is(200)); - - assertEquals("OK", testEmptyCacheResult); - } - - @Test void testEmptyCacheByResourceId() throws Exception { diff --git a/metis-dereference/metis-dereference-service/pom.xml b/metis-dereference/metis-dereference-service/pom.xml index 75c0efdcfd..393c4408b4 100644 --- a/metis-dereference/metis-dereference-service/pom.xml +++ b/metis-dereference/metis-dereference-service/pom.xml @@ -4,7 +4,7 @@ metis-dereference eu.europeana.metis - 12.2 + 13 metis-dereference-service diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceResultWrapper.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceResultWrapper.java deleted file mode 100644 index 7360a0d069..0000000000 --- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceResultWrapper.java +++ /dev/null @@ -1,112 +0,0 @@ -package eu.europeana.metis.dereference.service; - -import eu.europeana.enrichment.api.external.DereferenceResultStatus; -import eu.europeana.enrichment.api.external.model.EnrichmentBase; -import eu.europeana.metis.dereference.Vocabulary; - -/** - * Dereference result of enrichment base or enrichment entity - */ -public class DereferenceResultWrapper { - - private final EnrichmentBase enrichmentBase; - private final Vocabulary vocabulary; - private final String entity; - private final DereferenceResultStatus dereferenceResultStatus; - - /** - * Constructor for result including enrichment and 
vocabulary - * - * @param enrichmentBase enrichment base - * @param vocabulary vocabulary related to dereferenced entity - */ - public DereferenceResultWrapper(EnrichmentBase enrichmentBase, Vocabulary vocabulary) { - this.enrichmentBase = enrichmentBase; - this.vocabulary = vocabulary; - this.entity = null; - this.dereferenceResultStatus = null; - } - - /** - * Constructor for result including enrichment, vocabulary and status - * - * @param enrichmentBase enrichment base - * @param vocabulary vocabulary related to dereferenced entity - * @param dereferenceResultStatus status of the dereference process - */ - public DereferenceResultWrapper(EnrichmentBase enrichmentBase, Vocabulary vocabulary, - DereferenceResultStatus dereferenceResultStatus) { - this.enrichmentBase = enrichmentBase; - this.vocabulary = vocabulary; - this.dereferenceResultStatus = dereferenceResultStatus; - this.entity = null; - } - - /** - * Constructor for including status - * - * @param dereferenceResultStatus status of the dereference process - */ - public DereferenceResultWrapper(DereferenceResultStatus dereferenceResultStatus) { - this.enrichmentBase = null; - this.vocabulary = null; - this.entity = null; - this.dereferenceResultStatus = dereferenceResultStatus; - } - - /** - * Constructor for including entity and vocabulary - * - * @param entity entity dereferenced - * @param vocabulary vocabulary related to dereferenced entity - */ - public DereferenceResultWrapper(String entity, Vocabulary vocabulary) { - this.entity = entity; - this.vocabulary = vocabulary; - this.enrichmentBase = null; - this.dereferenceResultStatus = null; - } - - /** - * Constructor for including vocabulary and status - * - * @param vocabulary vocabulary related to dereferenced entity - * @param dereferenceResultStatus status of the dereference process - */ - public DereferenceResultWrapper(Vocabulary vocabulary, DereferenceResultStatus dereferenceResultStatus) { - this.enrichmentBase = null; - this.vocabulary = 
vocabulary; - this.entity = null; - this.dereferenceResultStatus = dereferenceResultStatus; - } - - /** - * Constructor for including entity and vocabulary - * - * @param entity entity dereferenced - * @param vocabulary vocabulary related to dereferenced entity - * @param dereferenceResultStatus status of the dereference process - */ - public DereferenceResultWrapper(String entity, Vocabulary vocabulary, DereferenceResultStatus dereferenceResultStatus) { - this.entity = entity; - this.vocabulary = vocabulary; - this.dereferenceResultStatus = dereferenceResultStatus; - this.enrichmentBase = null; - } - - public EnrichmentBase getEnrichmentBase() { - return enrichmentBase; - } - - public Vocabulary getVocabulary() { - return vocabulary; - } - - public String getEntity() { - return entity; - } - - public DereferenceResultStatus getDereferenceResultStatus() { - return dereferenceResultStatus; - } -} diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceService.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceService.java index 591beda752..eca8b380b5 100644 --- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceService.java +++ b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/DereferenceService.java @@ -3,17 +3,27 @@ import eu.europeana.metis.dereference.DereferenceResult; /** - * Dereferencing service Created by ymamakis on 2/11/16. + * Implementations of this interface are able to dereference resource IDs. If the resource's + * vocabulary specifies a positive iteration count, this method also repeatedly retrieves the + * 'broader' resources and returns those as well. */ public interface DereferenceService { /** - * Dereference a URI + *

+ * This method dereferences a resource. If the resource's vocabulary specifies a positive + * iteration count, this method also repeatedly retrieves the 'broader' resources and returns + * those as well. + *

+ *

+ * A resource may have references to its 'broader' resources. these resources form a directed + * graph and the iteration count is the distance from the requested resource. This method performs + * a breadth-first search through this graph to retrieve all resources within a certain distance + * from the requested resource. The distance depends on the vocabulary of the main resource. + *

* - * @param resourceId The resource ID (URI) to dereference - * @return Dereferenceresult contains of the dereferenced entity (or multiple in case of parent entities). List is not null, but - * could be empty and the dereference result status of enrichment. If an exception occurs the status is not set, it should be - * captured by the callee. + * @param resourceId The resource to dereference. + * @return An object containing the dereferenced resources and the result status of the process. */ DereferenceResult dereference(String resourceId); } diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferenceService.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferenceService.java index 21a299f2e3..3a6e12bb8c 100644 --- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferenceService.java +++ b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferenceService.java @@ -1,6 +1,6 @@ package eu.europeana.metis.dereference.service; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; import eu.europeana.enrichment.api.external.DereferenceResultStatus; import eu.europeana.enrichment.api.external.model.Concept; @@ -20,25 +20,22 @@ import eu.europeana.metis.dereference.service.utils.GraphUtils; import eu.europeana.metis.dereference.service.utils.VocabularyCandidates; import eu.europeana.metis.exception.BadContentException; +import jakarta.xml.bind.JAXBException; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; 
-import java.util.function.Function; -import java.util.stream.Collectors; import java.util.stream.Stream; -import jakarta.xml.bind.JAXBException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,7 +43,8 @@ import org.springframework.stereotype.Component; /** - * Mongo implementation of the dereference service Created by ymamakis on 2/11/16. + * Implementation of {@link DereferenceService} that uses the MongoDB for retrieving vocabularies + * and for caching. */ @Component public class MongoDereferenceService implements DereferenceService { @@ -57,6 +55,16 @@ public class MongoDereferenceService implements DereferenceService { private final ProcessedEntityDao processedEntityDao; private final VocabularyDao vocabularyDao; + private record OriginalEntity(String entity, DereferenceResultStatus resultStatus) {} + + private record TransformedEntity(Vocabulary vocabulary, String entity, + DereferenceResultStatus resultStatus) {} + + private record DeserializedEntity(EnrichmentBase entity, DereferenceResultStatus status) {} + + private record MatchedVocabularies(VocabularyCandidates candidates, + DereferenceResultStatus status) {} + /** * Constructor. * @@ -83,382 +91,237 @@ public MongoDereferenceService(ProcessedEntityDao processedEntityDao, this.vocabularyDao = vocabularyDao; } - private static DereferenceResult checkEmptyEnrichmentBaseAndVocabulary( - DereferenceResultWrapper resource) { - DereferenceResult dereferenceResult = null; - // No EnrichmentBase and no Vocabulary. 
- if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null - && resource.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) { - dereferenceResult = new DereferenceResult(DereferenceResultStatus.NO_VOCABULARY_MATCHING); - // No EnrichmentBase, no Vocabulary and an error occurred. - } else if (resource.getEnrichmentBase() == null && resource.getVocabulary() == null) { - dereferenceResult = new DereferenceResult(resource.getDereferenceResultStatus()); + @Override + public DereferenceResult dereference(String resourceId) { + + // Sanity checks + if (resourceId == null) { + throw new IllegalArgumentException("Parameter resourceId cannot be null."); } - return dereferenceResult; - } - private static Stream getStream(Collection collection) { - return collection == null ? Stream.empty() : collection.stream(); - } + // Get the main object to dereference. In case of errors we are done. + final TransformedEntity resource = dereferenceSingleResource(resourceId); + if (resource.resultStatus() != DereferenceResultStatus.SUCCESS) { + return new DereferenceResult(resource.resultStatus()); + } - private static DereferenceResultWrapper evaluateTransformedEntityAndVocabulary( - VocabularyCandidates vocabularyCandidates, - String transformedEntity, Vocabulary chosenVocabulary, - MongoDereferencedEntity originalEntity) { - final DereferenceResultWrapper dereferenceResultWrapper; - // If retrieval or transformation of entity failed, and we have one vocabulary then we store that - if (transformedEntity == null && vocabularyCandidates.getVocabularies().size() == 1) { - dereferenceResultWrapper = new DereferenceResultWrapper( - vocabularyCandidates.getVocabularies().get(0), - originalEntity.getDereferenceResultStatus()); - } else { - if (transformedEntity == null && chosenVocabulary == null && originalEntity.getDereferenceResultStatus() == null) { - dereferenceResultWrapper = new DereferenceResultWrapper((EnrichmentBase) null, - null, - 
DereferenceResultStatus.NO_VOCABULARY_MATCHING); - } else { - dereferenceResultWrapper = new DereferenceResultWrapper(transformedEntity, chosenVocabulary, - originalEntity.getDereferenceResultStatus()); - } + // Deserialize the entity + final EnrichmentBase deserializedEntity; + try { + deserializedEntity = resource.entity() == null ? null + : EnrichmentBaseConverter.convertToEnrichmentBase(resource.entity()); + } catch (JAXBException e) { + LOGGER.info("Problem occurred while parsing transformed entity {}.", resourceId, e); + return new DereferenceResult(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR); } - return dereferenceResultWrapper; - } - /** - * Mongo dereference implementation - * - * @param resourceId The resource ID (URI) to dereference - * @return Dereference results with dereference status. - * @throws IllegalArgumentException In case the Parameter is null. - */ - @Override - public DereferenceResult dereference(String resourceId) { - // Sanity check - if (resourceId == null) { - throw new IllegalArgumentException("Parameter resourceId cannot be null."); + // Perform the breadth-first search to search for broader terms (if needed). + final int iterations = resource.vocabulary().getIterations(); + final Map result; + if (iterations > 0) { + result = GraphUtils.breadthFirstSearch(resourceId, + new DeserializedEntity(deserializedEntity, resource.resultStatus()), + resource.vocabulary().getIterations(), + this::resolveBroaderValue, this::extractBroaderResources); + } else { + result = new HashMap<>(); + result.put(resourceId, new DeserializedEntity(deserializedEntity, + resource.resultStatus())); } - return dereferenceResource(resourceId); + // Done. Collect results. + return new DereferenceResult( + result.values().stream().map(DeserializedEntity::entity).toList(), + result.values().stream().map(DeserializedEntity::status).filter(Objects::nonNull).findFirst() + .orElse(DereferenceResultStatus.SUCCESS)); } - /** - *

- * This method dereferences a resource. If the resource's vocabulary specifies a positive - * iteration count, this method also repeatedly retrieves the 'broader' resources and returns - * those as well. - *

- *

- * A resource has references to its 'broader' resources (see - * {@link #extractBroaderResources(EnrichmentBase, Set)}). As such, the resources form a directed - * graph and the iteration count is the distance from the requested resource. This method performs - * a breadth-first search through this graph to retrieve all resources within a certain distance - * from the requested resource. - *

- *

The Dereference result contains a collection of dereferenced resources. - * Note: That could not be null, but could be empty. The deferenced status could have the - * following values: - *

    - *
  • NO_VOCABULARY_MATCHING, this occurs if there is no enrichment base and no vocabulary.
  • - *
  • NO_ENTITY_FOR_VOCABULARY, this means the resource was found but no vocabulary and enrichment was found.
  • - *
  • ENTITY_FOUND_XLT_ERROR, this occurs when an JAXBExcetion happened.
  • - *
  • INVALID_URL, this occurs when an URIException happened.
  • - *
  • UNKNOWN_EUROPEANA_ENTITY, this occurs when the europeana entity is unknown.
  • - *
  • SUCCESS, this means everything was processed successfully.
  • - *
- *

- * - * @param resourceId The resource to dereference. - * @return An object containing the dereferenced resources and the status of dereference process. - */ - private DereferenceResult dereferenceResource(String resourceId) { - DereferenceResult dereferenceResult; + private DeserializedEntity resolveBroaderValue(String resourceId) { + final TransformedEntity resource = dereferenceSingleResource(resourceId); + final EnrichmentBase deserializedEntity; try { - // Get the main object to dereference. If null, we are done. - final DereferenceResultWrapper resource = computeEnrichmentBaseVocabulary(resourceId); - - dereferenceResult = checkEmptyEnrichmentBaseAndVocabulary(resource); - - if (dereferenceResult == null) { - // Create value resolver that catches exceptions and logs them. - final Function> valueResolver = getValueResolver(); - - // Perform the breadth-first search to search for broader terms (if needed). - final int iterations = resource.getVocabulary().getIterations(); - final Map> result; - if (iterations > 0) { - result = GraphUtils - .breadthFirstSearch(resourceId, - new ImmutablePair<>(resource.getEnrichmentBase(), - resource.getDereferenceResultStatus()), - resource.getVocabulary().getIterations(), - valueResolver, this::extractBroaderResources); - } else { - result = new HashMap<>(); - result.put(resourceId, new ImmutablePair<>(resource.getEnrichmentBase(), - resource.getDereferenceResultStatus())); - } - // Done - dereferenceResult = new DereferenceResult( - result.values().stream().map(Pair::getLeft).collect(Collectors.toList()), - result.values().stream().map(Pair::getRight).filter(Objects::nonNull).findFirst() - .orElse(DereferenceResultStatus.SUCCESS)); - } - } catch (JAXBException jaxbException) { - LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId), - jaxbException); - // No EnrichmentBase + Status - dereferenceResult = new DereferenceResult( - DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR); - } 
catch (URISyntaxException uriSyntaxException) { - LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", resourceId), - uriSyntaxException); - // No EnrichmentBase + Status - dereferenceResult = new DereferenceResult(DereferenceResultStatus.INVALID_URL); + deserializedEntity = resource.entity() == null ? null + : EnrichmentBaseConverter.convertToEnrichmentBase(resource.entity()); + } catch (JAXBException e) { + LOGGER.info("Problem occurred while parsing transformed entity {}.", resourceId, e); + return new DeserializedEntity(null, DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR); } - return dereferenceResult; + return new DeserializedEntity(deserializedEntity, resource.resultStatus()); } - private Function> getValueResolver() { - return key -> { - DereferenceResultWrapper result; - try { - result = computeEnrichmentBaseVocabulary(key); - if (result.getEnrichmentBase() == null && result.getVocabulary() == null - && result.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) { - // No EnrichmentBase + Status - return new ImmutablePair<>(null, DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY); - } else { - // EnrichmentBase + Status - return new ImmutablePair<>(result.getEnrichmentBase(), - result.getDereferenceResultStatus()); - } - } catch (JAXBException jaxbException) { - LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key), - jaxbException); - // No EnrichmentBase + Status - return new ImmutablePair<>(null, DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR); - } catch (URISyntaxException uriSyntaxException) { - LOGGER.warn(String.format("Problem occurred while dereferencing broader resource %s.", key), - uriSyntaxException); - // No EnrichmentBase + Status - return new ImmutablePair<>(null, DereferenceResultStatus.INVALID_URL); - } + private void extractBroaderResources(DeserializedEntity resource, Set destination) { + final Stream resourceIdStream = switch (resource.entity) { + case 
Concept concept -> Optional.ofNullable(concept.getBroader()).stream() + .flatMap(Collection::stream).map(Resource::getResource); + case TimeSpan timeSpan -> Optional.ofNullable(timeSpan.getIsPartOf()).stream() + .flatMap(List::stream).map(LabelResource::getResource); + case Place place -> Optional.ofNullable(place.getIsPartOf()).stream() + .flatMap(Collection::stream).map(LabelResource::getResource); + case null, default -> Stream.empty(); }; + resourceIdStream.filter(Objects::nonNull).forEach(destination::add); } - private void extractBroaderResources(Pair resource, - Set destination) { - final Stream resourceIdStream; - if (resource.getLeft() instanceof Concept concept) { - resourceIdStream = getStream(concept.getBroader()).map(Resource::getResource); - } else if (resource.getLeft() instanceof TimeSpan timeSpan) { - resourceIdStream = Optional.ofNullable(timeSpan.getIsPartOf()).stream() - .flatMap(List::stream).map(LabelResource::getResource); - } else if (resource.getLeft() instanceof Place place) { - resourceIdStream = Optional.ofNullable(place.getIsPartOf()).stream() - .flatMap(Collection::stream).map(LabelResource::getResource); - } else { - resourceIdStream = Stream.empty(); - } - resourceIdStream.filter(Objects::nonNull).forEach(destination::add); - } + private TransformedEntity dereferenceSingleResource(String resourceId) { - /** - * Computes the entity and vocabulary. - *

It will use the cache if it's still valid, otherwise it will retrieve(if applicable) the - * original entity and transform the result.

- *

The possible outcomes are: - *

    - *
  • Both items of the pair are null. We do not have a vocabulary candidate or we have more - * than one vocabulary candidate and all have not succeed either retrieving the original - * entity or transforming the retrieved entity.
  • - *
  • Entity xml(Left) is null, and vocabulary(Right) is non null. We have a vocabulary - * and the entity xml failed either to be retried or failed transformation.
  • - *
  • Entity xml(Left) is non null, and vocabulary(Right) is non null. We have a - * successful retrieval and transformation.
  • - *
- *

- * - * @param resourceId the url of the provider entity - * @param cachedEntity the cached entity object - * @return a EnrichmentEntityVocabulary with the entity, vocabulary, and status. - * @throws URISyntaxException if the resource identifier url is invalid - */ - private DereferenceResultWrapper computeEntityVocabulary(String resourceId, - ProcessedEntity cachedEntity) - throws URISyntaxException { - - final DereferenceResultWrapper transformedEntityVocabulary; - - //Check if vocabulary actually exists - Vocabulary cachedVocabulary = null; - boolean cachedVocabularyChanged = false; - if (cachedEntity != null && StringUtils.isNotBlank(cachedEntity.getVocabularyId())) { - cachedVocabulary = vocabularyDao.get(cachedEntity.getVocabularyId()); - cachedVocabularyChanged = cachedVocabulary == null; + // Check for URI validity. + try { + new URI(resourceId); + } catch (URISyntaxException e) { + LOGGER.warn("Invalid URI: {} with message: {}", resourceId, e.getMessage()); + return new TransformedEntity(null, null, DereferenceResultStatus.INVALID_URL); } - // If we do not have any cached entity, we need to compute it - if (cachedEntity == null || cachedVocabularyChanged) { - transformedEntityVocabulary = retrieveTransformAndEvaluateEntity(resourceId, cachedEntity); - - } else { - // if there was no xml entity but a vocabulary that means no entity for vocabulary - if (cachedEntity.getXml() == null && StringUtils.isNotBlank(cachedEntity.getVocabularyId())) { - transformedEntityVocabulary = new DereferenceResultWrapper((EnrichmentBase) null, - cachedVocabulary, DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY); - } else { - // otherwise If we have something in the cache we return that instead - transformedEntityVocabulary = retrieveTransformAndEvaluateEntity(resourceId, cachedEntity); - } + // Check if a cached item exists for this resource ID. 
+ final TransformedEntity cachedEntity = getFromCache(resourceId); + if (cachedEntity != null) { + return cachedEntity; } - return transformedEntityVocabulary; + // So no cached item exists. Perform the actual algorithm and save the result to cache. + final TransformedEntity result = performDereferenceAlgorithmForSingleResource(resourceId); + saveToCache(resourceId, result); + return result; } - private DereferenceResultWrapper retrieveTransformAndEvaluateEntity(String resourceId, ProcessedEntity cachedEntity) - throws URISyntaxException { + private TransformedEntity performDereferenceAlgorithmForSingleResource(String resourceId) { - final VocabularyCandidates vocabularyCandidates = VocabularyCandidates - .findVocabulariesForUrl(resourceId, vocabularyDao::getByUriSearch); - - String transformedEntity = null; - Vocabulary chosenVocabulary = null; + // Find matching vocabularies, report if there are none. + final MatchedVocabularies vocabularyCandidates = getCandidateVocabularies(resourceId); + if (vocabularyCandidates.status != DereferenceResultStatus.SUCCESS) { + return new TransformedEntity(null, null, vocabularyCandidates.status); + } - MongoDereferencedEntity originalEntity = new MongoDereferencedEntity(resourceId, null); - MongoDereferencedEntity entityTransformed = new MongoDereferencedEntity(null, null); - //Only if we have vocabularies we continue - if (!vocabularyCandidates.isEmpty()) { - if (cachedEntity == null) { - originalEntity = retrieveOriginalEntity(resourceId, vocabularyCandidates); - } else { - originalEntity = new MongoDereferencedEntity(cachedEntity.getXml(), DereferenceResultStatus.SUCCESS); - } - //If original entity exists, try transformation - if (originalEntity.getEntity() != null - && originalEntity.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS) { - // Transform the original entity and find vocabulary if applicable. 
- for (Vocabulary vocabulary : vocabularyCandidates.getVocabularies()) { - entityTransformed = transformEntity(vocabulary, originalEntity.getEntity(), resourceId); - transformedEntity = entityTransformed.getEntity(); - if (transformedEntity != null) { - chosenVocabulary = vocabulary; - break; - } - } - // There was an update in transforming, so we update the result status. - if (originalEntity.getDereferenceResultStatus() - != entityTransformed.getDereferenceResultStatus()) { - originalEntity = new MongoDereferencedEntity(originalEntity.getEntity(), - entityTransformed.getDereferenceResultStatus()); - } - } + // If there are vocabularies, we attempt to obtain the original entity from source. + final OriginalEntity originalEntity = retrieveOriginalEntity(resourceId, + vocabularyCandidates.candidates.getVocabulariesSuffixes()); + if (originalEntity.resultStatus() != DereferenceResultStatus.SUCCESS) { + return new TransformedEntity(null, null, originalEntity.resultStatus()); } - DereferenceResultWrapper result = evaluateTransformedEntityAndVocabulary(vocabularyCandidates, transformedEntity, - chosenVocabulary, originalEntity); - - if (entityTransformed.getDereferenceResultStatus() == DereferenceResultStatus.SUCCESS && cachedEntity == null) { - saveEntity(resourceId, - new DereferenceResultWrapper(entityTransformed.getEntity(), - result.getVocabulary())); - } else if (cachedEntity == null) { - saveEntity(resourceId, - new DereferenceResultWrapper(originalEntity.getEntity(), - result.getVocabulary())); + // If we managed to obtain the original entity, we will try to transform it. 
+ final Set statuses = EnumSet.noneOf(DereferenceResultStatus.class); + for (Vocabulary vocabulary : vocabularyCandidates.candidates.getVocabularies()) { + final TransformedEntity transformedEntity = transformEntity(vocabulary, + originalEntity.entity(), resourceId); + if (transformedEntity.resultStatus() == DereferenceResultStatus.SUCCESS) { + return transformedEntity; + } + statuses.add(transformedEntity.resultStatus()); } - return result; + // If we here, we did not find a successful transformation. + final DereferenceResultStatus status = statuses.contains( + DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS) + ? DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS + : statuses.stream().findAny().orElseThrow(IllegalStateException::new); + return new TransformedEntity(null, null, status); } - private void saveEntity(String resourceId, DereferenceResultWrapper transformedEntityAndVocabularyPair) { + private MatchedVocabularies getCandidateVocabularies(String resourceId) { - final String entityXml = transformedEntityAndVocabularyPair.getEntity(); - final Vocabulary vocabulary = transformedEntityAndVocabularyPair.getVocabulary(); - final String vocabularyIdString = Optional.ofNullable(vocabulary).map(Vocabulary::getId) - .map(ObjectId::toString).orElse(null); - //Save entity - ProcessedEntity entityToCache = new ProcessedEntity(); - entityToCache.setResourceId(resourceId); - entityToCache.setXml(entityXml); - entityToCache.setVocabularyId(vocabularyIdString); - processedEntityDao.save(entityToCache); + // Find matching vocabularies. + final VocabularyCandidates vocabularyCandidates; + try { + vocabularyCandidates = VocabularyCandidates.findVocabulariesForUrl(resourceId, + vocabularyDao::getByUriSearch); + } catch (URISyntaxException e) { + // Shouldn't happen as we checked this before. 
+ LOGGER.warn(String.format("Problem occurred while dereferencing resource %s.", + resourceId), e); + return new MatchedVocabularies(null, DereferenceResultStatus.FAILURE); + } + + // Report if there are none. + if (vocabularyCandidates.isEmpty()) { + return new MatchedVocabularies(null, DereferenceResultStatus.NO_VOCABULARY_MATCHING); + } + + // Return result. + return new MatchedVocabularies(vocabularyCandidates, DereferenceResultStatus.SUCCESS); } - private MongoDereferencedEntity transformEntity(Vocabulary vocabulary, - final String originalEntity, final String resourceId) { - Optional result; - DereferenceResultStatus resultStatus; + private TransformedEntity transformEntity(Vocabulary vocabulary, final String originalEntity, + final String resourceId) { try { - final IncomingRecordToEdmTransformer incomingRecordToEdmTransformer = new IncomingRecordToEdmTransformer( - vocabulary.getXslt()); - result = incomingRecordToEdmTransformer.transform(originalEntity, resourceId); - if (result.isEmpty()) { + final IncomingRecordToEdmTransformer incomingRecordToEdmTransformer = + new IncomingRecordToEdmTransformer(vocabulary.getXslt()); + final String result = incomingRecordToEdmTransformer + .transform(originalEntity, resourceId).orElse(null); + final DereferenceResultStatus resultStatus; + if (result == null) { resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS; } else { resultStatus = DereferenceResultStatus.SUCCESS; } + return new TransformedEntity(vocabulary, result, resultStatus); } catch (TransformerException | BadContentException | ParserConfigurationException e) { - LOGGER.warn("Error transforming entity: {} with message: {}", resourceId, e.getMessage()); + LOGGER.warn("Error transforming entity: {} with message: {}", resourceId, + e.getMessage()); LOGGER.debug("Transformation issue: ", e); - resultStatus = DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR; - result = Optional.empty(); + return new 
TransformedEntity(vocabulary, null, + DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR); } - return new MongoDereferencedEntity(result.orElse(null), resultStatus); } - private MongoDereferencedEntity retrieveOriginalEntity(String resourceId, - VocabularyCandidates candidates) { - DereferenceResultStatus dereferenceResultStatus = DereferenceResultStatus.SUCCESS; + private OriginalEntity retrieveOriginalEntity(String resourceId, Set potentialSuffixes) { - if (candidates.isEmpty()) { - dereferenceResultStatus = DereferenceResultStatus.NO_VOCABULARY_MATCHING; - return new MongoDereferencedEntity(null, dereferenceResultStatus); - } else { + // Sanity check: this should not happen. + if (potentialSuffixes.isEmpty()) { + throw new IllegalArgumentException(); + } + + // Compute the result (a URI syntax issue is considered a problem with the suffix). + final String originalEntity = potentialSuffixes.stream().map(suffix -> { try { - // Check the input (check the resource ID for URI syntax). - new URI(resourceId); - } catch (URISyntaxException e) { - LOGGER.error("Invalid URI: {} with message: {}", resourceId, e.getMessage()); - dereferenceResultStatus = DereferenceResultStatus.INVALID_URL; - return new MongoDereferencedEntity(null, dereferenceResultStatus); - } - // Compute the result (a URI syntax issue is considered a problem with the suffix). - final String originalEntity = candidates.getVocabulariesSuffixes().stream().map(suffix -> { - try { - return retriever.retrieve(resourceId, suffix); - } catch (IOException | URISyntaxException e) { - LOGGER.warn("Failed to retrieve: {} with message: {}", resourceId, e.getMessage()); - LOGGER.debug("Problem retrieving resource.", e); - return null; - } - }).filter(Objects::nonNull).findAny().orElse(null); - - // Evaluate the result. 
- if (originalEntity == null) { - if (LOGGER.isInfoEnabled()) { - LOGGER.info("No entity XML for uri {}", CRLF_PATTERN.matcher(resourceId).replaceAll("")); - } - dereferenceResultStatus = DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY; + return retriever.retrieve(resourceId, suffix); + } catch (IOException | URISyntaxException e) { + LOGGER.warn("Failed to retrieve: {} with message: {}", resourceId, e.getMessage()); + LOGGER.debug("Problem retrieving resource.", e); + return null; } - return new MongoDereferencedEntity(originalEntity, dereferenceResultStatus); + }).filter(Objects::nonNull).findAny().orElse(null); + + // Evaluate and return the result. + if (originalEntity == null && LOGGER.isInfoEnabled()) { + LOGGER.info("No entity XML for uri {}", sanitizeCRLF(resourceId)); } + final DereferenceResultStatus dereferenceResultStatus = originalEntity == null ? + DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY : DereferenceResultStatus.SUCCESS; + return new OriginalEntity(originalEntity, dereferenceResultStatus); } - DereferenceResultWrapper computeEnrichmentBaseVocabulary(String resourceId) - throws JAXBException, URISyntaxException { - // Try to get the entity and its vocabulary from the cache. + private TransformedEntity getFromCache(String resourceId) { + + // Try to find a cached entity. If there is none, we are done. final ProcessedEntity cachedEntity = processedEntityDao.getByResourceId(resourceId); - final DereferenceResultWrapper result = computeEntityVocabulary(resourceId, cachedEntity); + if (cachedEntity == null) { + return null; + } - // Parse the entity. - if (result.getEntity() == null || result.getVocabulary() == null) { - return new DereferenceResultWrapper(result.getDereferenceResultStatus()); + // Check the vocabulary. If it no longer exists, we need to process the entity again. 
+ final Vocabulary vocabulary; + if (StringUtils.isNotBlank(cachedEntity.getVocabularyId())) { + vocabulary = vocabularyDao.get(cachedEntity.getVocabularyId()); + if (vocabulary == null) { + return null; + } } else { - return new DereferenceResultWrapper( - EnrichmentBaseConverter.convertToEnrichmentBase(result.getEntity()), - result.getVocabulary(), - result.getDereferenceResultStatus()); + vocabulary = null; } + + // Convert to a transformed entity and return. + return new TransformedEntity(vocabulary, cachedEntity.getXml(), + cachedEntity.getResultStatus()); + } + + private void saveToCache(String resourceId, TransformedEntity transformedEntity) { + final ProcessedEntity entityToCache = new ProcessedEntity(); + entityToCache.setResourceId(resourceId); + entityToCache.setXml(transformedEntity.entity()); + entityToCache.setVocabularyId(Optional.ofNullable(transformedEntity.vocabulary()) + .map(Vocabulary::getId).map(ObjectId::toString).orElse(null)); + entityToCache.setResultStatus(transformedEntity.resultStatus()); + processedEntityDao.save(entityToCache); } } diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferencedEntity.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferencedEntity.java deleted file mode 100644 index 083cdda2ad..0000000000 --- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/MongoDereferencedEntity.java +++ /dev/null @@ -1,30 +0,0 @@ -package eu.europeana.metis.dereference.service; - -import eu.europeana.enrichment.api.external.DereferenceResultStatus; - -/** - * Dereferenced entity with status - */ -public class MongoDereferencedEntity { - - private final String entity; - private final DereferenceResultStatus dereferenceResultStatus; - - /** - * Constructor for entity and status - * @param entity dereferenced entity - * @param dereferenceResultStatus status of 
the entity - */ - public MongoDereferencedEntity(String entity, DereferenceResultStatus dereferenceResultStatus) { - this.entity = entity; - this.dereferenceResultStatus = dereferenceResultStatus; - } - - public String getEntity() { - return entity; - } - - public DereferenceResultStatus getDereferenceResultStatus() { - return dereferenceResultStatus; - } -} diff --git a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/utils/VocabularyCandidates.java b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/utils/VocabularyCandidates.java index cd15977749..ddd4c21feb 100644 --- a/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/utils/VocabularyCandidates.java +++ b/metis-dereference/metis-dereference-service/src/main/java/eu/europeana/metis/dereference/service/utils/VocabularyCandidates.java @@ -1,6 +1,6 @@ package eu.europeana.metis.dereference.service.utils; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; import eu.europeana.metis.dereference.Vocabulary; import java.net.URI; @@ -56,7 +56,7 @@ public static VocabularyCandidates findVocabulariesForUrl(String resourceId, Function> searchInPersistence) throws URISyntaxException { // Initial search on the host name (already filtering the great majority of vocabularies). - final String searchString = new URI(resourceId.replace(" ", "%20")).getHost(); + final String searchString = new URI(resourceId).getHost(); final List searchedVocabularies = searchInPersistence.apply(searchString); // Narrow it down further: precisely match the URI. @@ -71,10 +71,10 @@ public static VocabularyCandidates findVocabulariesForUrl(String resourceId, // Log and done. 
if (candidates.isEmpty() && (LOGGER.isInfoEnabled())) { - LOGGER.info("No vocabularies found for uri {}", CRLF_PATTERN.matcher(resourceId).replaceAll("")); + LOGGER.info("No vocabularies found for uri {}", sanitizeCRLF(resourceId)); } if (candidates.size() > 1 && LOGGER.isWarnEnabled()) { - LOGGER.warn("Multiple vocabularies found for uri {}: {}", CRLF_PATTERN.matcher(resourceId).replaceAll(""), + LOGGER.warn("Multiple vocabularies found for uri {}: {}", sanitizeCRLF(resourceId), candidates.stream().map(Vocabulary::getName).collect(Collectors.joining(", "))); } return new VocabularyCandidates(candidates); diff --git a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java index 0791c3785a..be1e5510d4 100644 --- a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java +++ b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferenceServiceTest.java @@ -3,231 +3,246 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.clearInvocations; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; 
+import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import dev.morphia.Datastore; import eu.europeana.enrichment.api.external.DereferenceResultStatus; -import eu.europeana.enrichment.api.external.model.Concept; -import eu.europeana.enrichment.api.external.model.Place; import eu.europeana.metis.dereference.DereferenceResult; +import eu.europeana.metis.dereference.ProcessedEntity; import eu.europeana.metis.dereference.RdfRetriever; import eu.europeana.metis.dereference.Vocabulary; import eu.europeana.metis.dereference.service.dao.ProcessedEntityDao; import eu.europeana.metis.dereference.service.dao.VocabularyDao; -import eu.europeana.metis.mongo.embedded.EmbeddedLocalhostMongo; import java.io.IOException; +import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.Objects; -import jakarta.xml.bind.JAXBException; import org.apache.commons.io.IOUtils; -import org.junit.jupiter.api.AfterEach; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.stubbing.Answer; /** * Unit tests for {@link MongoDereferenceService} */ class MongoDereferenceServiceTest { - private MongoDereferenceService service; - private Datastore vocabularyDaoDatastore; - private final EmbeddedLocalhostMongo embeddedLocalhostMongo = new EmbeddedLocalhostMongo(); + private static final VocabularyDao vocabularyDao = mock(VocabularyDao.class); + private static final ProcessedEntityDao processedEntityDao = mock(ProcessedEntityDao.class); + private final RdfRetriever retriever = mock(RdfRetriever.class); + private final 
MongoDereferenceService dereferenceService = + spy(new MongoDereferenceService(retriever, processedEntityDao, vocabularyDao)); - @BeforeEach - void prepare() { - embeddedLocalhostMongo.start(); - String mongoHost = embeddedLocalhostMongo.getMongoHost(); - int mongoPort = embeddedLocalhostMongo.getMongoPort(); - - MongoClient mongoClient = MongoClients.create(String.format("mongodb://%s:%s", mongoHost, mongoPort)); - VocabularyDao vocabularyDao = new VocabularyDao(mongoClient, "voctest") { - { - vocabularyDaoDatastore = this.getDatastore(); - } - }; - ProcessedEntityDao processedEntityDao = new ProcessedEntityDao(mongoClient, "processedEntity"); - service = spy(new MongoDereferenceService(new RdfRetriever(), processedEntityDao, vocabularyDao)); - } + private static final String GEONAMES_URI = "http://sws.geonames.org/"; + private static Vocabulary geonames; + private static final String PLACE_ID = "http://sws.geonames.org/3020251/"; + private static String placeSourceEntity; - @AfterEach - void destroy() { - embeddedLocalhostMongo.stop(); - } + private static final Map CACHE = new HashMap<>(); - @Test - void testDereference_Success() throws JAXBException, IOException, URISyntaxException { - // Create vocabulary for geonames and save it. 
- final Vocabulary geonames = new Vocabulary(); - geonames.setUris(Collections.singleton("http://sws.geonames.org/")); - geonames.setXslt(IOUtils - .toString(Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("geonames.xsl")), - StandardCharsets.UTF_8)); + @BeforeAll + static void prepareData() throws IOException { + + // Create the vocabulary + geonames = new Vocabulary(); + geonames.setId(new ObjectId(new Date())); + geonames.setUris(Collections.singleton(GEONAMES_URI)); + geonames.setXslt(IOUtils.toString(Objects.requireNonNull(MongoDereferenceServiceTest.class + .getClassLoader().getResourceAsStream("geonames.xsl")), StandardCharsets.UTF_8)); geonames.setName("Geonames"); geonames.setIterations(0); - vocabularyDaoDatastore.save(geonames); - // Create geonames entity - final Place place = new Place(); - final String entityId = "http://sws.geonames.org/3020251/"; - place.setAbout(entityId); - - // Mock the service - doReturn(new DereferenceResultWrapper(place, geonames, DereferenceResultStatus.SUCCESS)).when(service) - .computeEnrichmentBaseVocabulary( - entityId); - - // Test the method - final DereferenceResult result = service.dereference(entityId); - assertNotNull(result); - assertEquals(1, result.getEnrichmentBasesAsList().size()); - assertSame(place, result.getEnrichmentBasesAsList().get(0)); - assertEquals(DereferenceResultStatus.SUCCESS, result.getDereferenceStatus()); + // Create the place + placeSourceEntity = IOUtils.toString(Objects.requireNonNull(MongoDereferenceServiceTest.class + .getClassLoader().getResourceAsStream("place_entity.xsl")), StandardCharsets.UTF_8); } - @Test - void testDereference_IllegalArgument() { - final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> service.dereference(null)); - assertEquals("Parameter resourceId cannot be null.", exception.getMessage()); + @BeforeEach + void resetMocks() throws IOException, URISyntaxException { + + // Reset the mocks + 
reset(vocabularyDao, processedEntityDao, retriever, dereferenceService); + + // Add support for vocabulary in the mocks. + final String searchString = new URI(GEONAMES_URI).getHost(); + doReturn(List.of(geonames)).when(vocabularyDao).getByUriSearch(searchString); + doReturn(geonames).when(vocabularyDao).get(geonames.getId().toString()); + + // Add support for the place in the mocks. + doReturn(placeSourceEntity).when(retriever).retrieve(eq(PLACE_ID), anyString()); + + // Clear cache and build the cache functionality + CACHE.clear(); + doAnswer((Answer) invocation -> { + final ProcessedEntity entity = invocation.getArgument(0); + CACHE.put(entity.getResourceId(), entity); + return null; + }).when(processedEntityDao).save(any()); + doAnswer((Answer) invocation -> CACHE.get((String) invocation.getArgument(0))) + .when(processedEntityDao).getByResourceId(anyString()); } @Test - void testDereference_AbsentObject() throws JAXBException, URISyntaxException { - final String entityId = "http://sws.geonames.org/3020251/"; - doReturn(new DereferenceResultWrapper(DereferenceResultStatus.SUCCESS)) - .when(service).computeEnrichmentBaseVocabulary(entityId); - final DereferenceResult emptyResult = service.dereference(entityId); - assertNotNull(emptyResult); - assertTrue(emptyResult.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, emptyResult.getDereferenceStatus()); + void testDereference_Success() throws IOException, URISyntaxException { + + // First time: no cached item available + final DereferenceResult result0 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result0); + assertEquals(1, result0.getEnrichmentBasesAsList().size()); + assertEquals(PLACE_ID, result0.getEnrichmentBasesAsList().getFirst().getAbout()); + assertEquals(DereferenceResultStatus.SUCCESS, result0.getDereferenceStatus()); + verify(vocabularyDao, times(1)).getByUriSearch(anyString()); + verify(retriever, times(1)).retrieve(eq(PLACE_ID), 
anyString()); + assertTrue(CACHE.containsKey(PLACE_ID)); + assertEquals(result0.getDereferenceStatus(), CACHE.get(PLACE_ID).getResultStatus()); + + // Second time: use cache, no second retrieval. + clearInvocations(vocabularyDao, retriever); + final DereferenceResult result1 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result1); + assertEquals(1, result1.getEnrichmentBasesAsList().size()); + assertEquals(PLACE_ID, result1.getEnrichmentBasesAsList().getFirst().getAbout()); + assertEquals(DereferenceResultStatus.SUCCESS, result1.getDereferenceStatus()); + verify(vocabularyDao, never()).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(eq(PLACE_ID), anyString()); } @Test - void testDereference_UnknownEuropeanaEntity() throws JAXBException, URISyntaxException, IOException { - // Create vocabulary for geonames and save it. - final Vocabulary geonames = new Vocabulary(); - geonames.setUris(Collections.singleton("http://sws.geonames.org/")); - geonames.setXslt(IOUtils - .toString(Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("geonames.xsl")), - StandardCharsets.UTF_8)); - geonames.setName("Geonames"); - geonames.setIterations(0); - vocabularyDaoDatastore.save(geonames); - - // Create geonames entity - final Place place = new Place(); - final String entityId = "http://sws.geonames.org/3020251/"; - place.setAbout(entityId); - doReturn(new DereferenceResultWrapper(place, geonames, null)) - .when(service).computeEnrichmentBaseVocabulary(entityId); - final DereferenceResult emptyResult = service.dereference(entityId); - assertNotNull(emptyResult); - assertFalse(emptyResult.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.SUCCESS, emptyResult.getDereferenceStatus()); + void testDereference_IllegalArgument() { + final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> dereferenceService.dereference(null)); + assertEquals("Parameter resourceId cannot 
be null.", exception.getMessage()); } @Test - void testDereference_NoVocabularyMatching() { - // Create concept - final Concept concept = new Concept(); - final String entityId = "http://data.europeana.eu/concept/XXXXXXXXX"; - concept.setAbout(entityId); - - final DereferenceResult emptyResult = service.dereference(entityId); - assertNotNull(emptyResult); - assertTrue(emptyResult.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, emptyResult.getDereferenceStatus()); + void testDereference_NoVocabularyMatching() throws IOException, URISyntaxException { + + // First time: no cached item available + final String nonExistingVocabularyEntity = "http://XXX.YYYYYYY.org/3020251/"; + final DereferenceResult result0 = dereferenceService.dereference(nonExistingVocabularyEntity); + assertNotNull(result0); + assertTrue(result0.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, result0.getDereferenceStatus()); + verify(vocabularyDao, times(1)).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(anyString(), anyString()); + assertTrue(CACHE.containsKey(nonExistingVocabularyEntity)); + assertEquals(result0.getDereferenceStatus(), CACHE.get(nonExistingVocabularyEntity).getResultStatus()); + + // Second time: use cache, no second retrieval. + clearInvocations(vocabularyDao, retriever); + final DereferenceResult result1 = dereferenceService.dereference(nonExistingVocabularyEntity); + assertNotNull(result1); + assertTrue(result1.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, result1.getDereferenceStatus()); + verify(vocabularyDao, never()).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(eq(nonExistingVocabularyEntity), anyString()); } @Test - void testDereference_NoEntityForVocabulary() throws JAXBException, URISyntaxException, IOException { - // Create vocabulary for geonames and save it. 
- final Vocabulary geonames = new Vocabulary(); - geonames.setUris(Collections.singleton("http://sws.geonames.org/")); - geonames.setXslt(IOUtils - .toString(Objects.requireNonNull(this.getClass().getClassLoader().getResourceAsStream("geonames.xsl")), - StandardCharsets.UTF_8)); - geonames.setName("Geonames"); - geonames.setIterations(0); - vocabularyDaoDatastore.save(geonames); - - // Create geonames entity - final Place place = new Place(); - final String entityId = "http://sws.geonames.org/302025X/"; - place.setAbout(entityId); - - final DereferenceResult emptyResult = service.dereference(entityId); - assertNotNull(emptyResult); - assertTrue(emptyResult.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY, emptyResult.getDereferenceStatus()); + void testDereference_NoEntityForVocabulary() throws IOException, URISyntaxException { + + // First time: no cached item available + final String nonExistingId = GEONAMES_URI + "XXXXXX"; + final DereferenceResult result0 = dereferenceService.dereference(nonExistingId); + assertNotNull(result0); + assertTrue(result0.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY, result0.getDereferenceStatus()); + verify(vocabularyDao, times(1)).getByUriSearch(anyString()); + verify(retriever, times(1)).retrieve(eq(nonExistingId), anyString()); + assertTrue(CACHE.containsKey(nonExistingId)); + assertEquals(result0.getDereferenceStatus(), CACHE.get(nonExistingId).getResultStatus()); + + // Second time: use cache, no second retrieval. 
+ clearInvocations(vocabularyDao, retriever); + final DereferenceResult result1 = dereferenceService.dereference(nonExistingId); + assertNotNull(result1); + assertTrue(result1.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.NO_ENTITY_FOR_VOCABULARY, result1.getDereferenceStatus()); + verify(vocabularyDao, never()).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(eq(nonExistingId), anyString()); } @Test - void testDereference_InvalidUrl() throws JAXBException, URISyntaxException { - final String entityId = "http://sws.geonames.org/3020251/"; + void testDereference_InvalidUrl() throws IOException, URISyntaxException { - // Mock the service - doThrow(new URISyntaxException("uri", "is invalid")) - .when(service).computeEnrichmentBaseVocabulary(entityId); + // Entity ID: ensure that it is indeed invalid. + final String entityId = "http://sws.geonames.org/?_)(*&^%$#@!3020251/"; + assertThrows(URISyntaxException.class, () -> new URI(entityId)); - // Test the method - final DereferenceResult result = service.dereference(entityId); + // Try to dereference. There should not be a cached item created. 
+ final DereferenceResult result = dereferenceService.dereference(entityId); assertNotNull(result); assertTrue(result.getEnrichmentBasesAsList().isEmpty()); assertEquals(DereferenceResultStatus.INVALID_URL, result.getDereferenceStatus()); + verify(retriever, never()).retrieve(anyString(), anyString()); + assertFalse(CACHE.containsKey(entityId)); } @Test - void testDereference_XmlXsltError() throws JAXBException, URISyntaxException { - final String entityId = "http://sws.geonames.org/3020251/"; - - // Mock the service - doThrow(new JAXBException("xml or xslt", "is invalid")) - .when(service).computeEnrichmentBaseVocabulary(entityId); - - // Test the method - final DereferenceResult result = service.dereference(entityId); - assertNotNull(result); - assertTrue(result.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR, result.getDereferenceStatus()); - } - - @Test - void testDereference_XmlXsltProduceNoContextualClass() throws JAXBException, URISyntaxException { - final String entityId = "http://www.yso.fi/onto/yso/p105069"; - - // Mock the service - doReturn(new DereferenceResultWrapper(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS)) - .when(service).computeEnrichmentBaseVocabulary(entityId); - - //Test the method - final DereferenceResult result = service.dereference(entityId); - assertNotNull(result); - assertTrue(result.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS, result.getDereferenceStatus()); + void testDereference_XmlXsltError() throws URISyntaxException, IOException { + + // First time: no cached item available + doReturn("THIS WILL BE AN ERROR").when(retriever).retrieve(eq(PLACE_ID), anyString()); + final DereferenceResult result0 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result0); + assertTrue(result0.getEnrichmentBasesAsList().isEmpty()); + 
assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR, result0.getDereferenceStatus()); + verify(vocabularyDao, times(1)).getByUriSearch(anyString()); + verify(retriever, times(1)).retrieve(eq(PLACE_ID), anyString()); + assertTrue(CACHE.containsKey(PLACE_ID)); + assertEquals(result0.getDereferenceStatus(), CACHE.get(PLACE_ID).getResultStatus()); + + // Second time: use cache, no second retrieval. + clearInvocations(vocabularyDao, retriever); + final DereferenceResult result1 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result1); + assertTrue(result1.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_ERROR, result1.getDereferenceStatus()); + verify(vocabularyDao, never()).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(eq(PLACE_ID), anyString()); } @Test - void testDereference_IdempotentResult() { - final String entityId = "https://d-nb.info/gnd/XXXX"; - - //Test the method - DereferenceResult result = service.dereference(entityId); - assertNotNull(result); - assertTrue(result.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, result.getDereferenceStatus()); - //Test it again it should remain consistent - result = service.dereference(entityId); - assertNotNull(result); - assertTrue(result.getEnrichmentBasesAsList().isEmpty()); - assertEquals(DereferenceResultStatus.NO_VOCABULARY_MATCHING, result.getDereferenceStatus()); + void testDereference_XmlXsltProduceNoContextualClass() throws URISyntaxException, IOException { + + // First time: no cached item available + doReturn("").when(retriever).retrieve(eq(PLACE_ID), anyString()); + final DereferenceResult result0 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result0); + assertTrue(result0.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS, result0.getDereferenceStatus()); + 
verify(vocabularyDao, times(1)).getByUriSearch(anyString()); + verify(retriever, times(1)).retrieve(eq(PLACE_ID), anyString()); + assertTrue(CACHE.containsKey(PLACE_ID)); + assertEquals(result0.getDereferenceStatus(), CACHE.get(PLACE_ID).getResultStatus()); + + // Second time: use cache, no second retrieval. + clearInvocations(vocabularyDao, retriever); + final DereferenceResult result1 = dereferenceService.dereference(PLACE_ID); + assertNotNull(result1); + assertTrue(result1.getEnrichmentBasesAsList().isEmpty()); + assertEquals(DereferenceResultStatus.ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS, result1.getDereferenceStatus()); + verify(vocabularyDao, never()).getByUriSearch(anyString()); + verify(retriever, never()).retrieve(eq(PLACE_ID), anyString()); } } diff --git a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferencingManagementServiceTest.java b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferencingManagementServiceTest.java index 0bbf94c130..a3c7d4477f 100644 --- a/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferencingManagementServiceTest.java +++ b/metis-dereference/metis-dereference-service/src/test/java/eu/europeana/metis/dereference/service/MongoDereferencingManagementServiceTest.java @@ -1,6 +1,8 @@ package eu.europeana.metis.dereference.service; +import static java.util.Objects.requireNonNull; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; @@ -44,7 +46,7 @@ class MongoDereferencingManagementServiceTest { private final ProcessedEntityDao processedEntityDao = mock(ProcessedEntityDao.class); @BeforeEach - void prepare() throws 
IOException { + void prepare() { embeddedLocalhostMongo.start(); String mongoHost = embeddedLocalhostMongo.getMongoHost(); int mongoPort = embeddedLocalhostMongo.getMongoPort(); @@ -118,26 +120,26 @@ void purgeCacheByVocabularyId() { } @Test - void loadVocabularies_expectSucess() throws VocabularyImportException, URISyntaxException, IOException { - final URL resourceLocation = this.getClass().getClassLoader().getResource("vocabulary.yml"); - final String expectedXslt = Files.readString(Paths.get(getClass() - .getClassLoader() - .getResource("vocabulary/voctest.xsl").toURI())).trim(); + void loadVocabularies_expectSuccess() throws VocabularyImportException, URISyntaxException, IOException { + final URL vocabularyUrl = this.getClass().getClassLoader().getResource("vocabulary.yml"); + final URL vocabularyTestXslUrl = requireNonNull(getClass().getClassLoader().getResource("vocabulary/voctest.xsl")); + final String expectedXslt = Files.readString(Paths.get(vocabularyTestXslUrl.toURI())).trim(); - final VocabularyCollectionImporter importer = new VocabularyCollectionImporterFactory().createImporter(resourceLocation); + final VocabularyCollectionImporter importer = new VocabularyCollectionImporterFactory().createImporter(vocabularyUrl); doReturn(importer).when(vocabularyCollectionImporterFactory).createImporter(any(URL.class)); - service.loadVocabularies(resourceLocation); + service.loadVocabularies(vocabularyUrl); Vocabulary vocabulary = vocabularyDaoDatastore.find(Vocabulary.class).first(); + assertNotNull(vocabulary); assertEquals("TestWikidata", vocabulary.getName()); assertEquals(expectedXslt, vocabulary.getXslt()); - assertEquals("http://www.wikidata.org/entity/", vocabulary.getUris().stream().findFirst().get()); - verify(vocabularyCollectionImporterFactory, times(1)).createImporter(resourceLocation); + assertEquals("http://www.wikidata.org/entity/", vocabulary.getUris().stream().findFirst().orElse(null)); + verify(vocabularyCollectionImporterFactory, 
times(1)).createImporter(vocabularyUrl); } @Test - void loadVocabularies_expectVocabularyImportError() throws VocabularyImportException, URISyntaxException, IOException { + void loadVocabularies_expectVocabularyImportError() { final URL resourceLocation = this.getClass().getClassLoader().getResource("vocabulary-fault.yml"); final VocabularyCollectionImporter importer = new VocabularyCollectionImporterFactory().createImporter(resourceLocation); doReturn(importer).when(vocabularyCollectionImporterFactory).createImporter(any(URL.class)); diff --git a/metis-dereference/metis-dereference-service/src/test/resources/place_entity.xsl b/metis-dereference/metis-dereference-service/src/test/resources/place_entity.xsl new file mode 100644 index 0000000000..b6207fdb77 --- /dev/null +++ b/metis-dereference/metis-dereference-service/src/test/resources/place_entity.xsl @@ -0,0 +1,38 @@ + + + + + Embrun + Aebura + Ambrun + Eburodunum + + + FR + 7069 + 05200 + 05201 CEDEX + 05202 CEDEX + 05208 CEDEX + 05209 CEDEX + 44.56387 + 6.49526 + + + + + + + + + + + + + + + GeoNames + 2006-01-15 + 2024-06-14 + + diff --git a/metis-dereference/pom.xml b/metis-dereference/pom.xml index 0fc1d329e6..fbefec6459 100644 --- a/metis-dereference/pom.xml +++ b/metis-dereference/pom.xml @@ -4,7 +4,7 @@ 4.0.0 eu.europeana.metis - 12.2 + 13 metis-framework diff --git a/metis-enrichment/metis-enrichment-client/pom.xml b/metis-enrichment/metis-enrichment-client/pom.xml index dcca23022f..447c6ee901 100644 --- a/metis-enrichment/metis-enrichment-client/pom.xml +++ b/metis-enrichment/metis-enrichment-client/pom.xml @@ -4,7 +4,7 @@ metis-enrichment eu.europeana.metis - 12.2 + 13 metis-enrichment-client jar diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java index f36de0afa1..dcaaf172a8 100644 --- 
a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImpl.java @@ -114,7 +114,7 @@ private static void setDereferenceStatusInReport(String resourceId, HashSet "Entity was found, applying the XSLT results in an XML error" - .concat("either because the entity is malformed or the XSLT is malformed)."); + .concat(" either because the entity is malformed or the XSLT is malformed."); case ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS -> "Entity was found, but the XSLT mapping did not produce a contextual class."; case INVALID_URL -> "A URL to be dereferenced is invalid."; @@ -315,7 +315,7 @@ private DereferencedEntities dereferenceExternalEntity(Set refere resultMap.put(referenceTerm, Optional.ofNullable(result).map(EnrichmentResultList::getEnrichmentBaseResultWrapperList) .orElseGet(Collections::emptyList).stream() .map(EnrichmentResultBaseWrapper::getEnrichmentBaseList).filter(Objects::nonNull) - .flatMap(List::stream).collect(Collectors.toList())); + .flatMap(List::stream).toList()); } // Return the result. 
diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImpl.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImpl.java index a40b4db059..b7d10e563a 100644 --- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImpl.java +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImpl.java @@ -6,6 +6,7 @@ import eu.europeana.enrichment.api.external.model.EnrichmentBase; import eu.europeana.enrichment.api.internal.AbstractSearchTerm; import eu.europeana.enrichment.api.internal.EntityResolver; +import eu.europeana.enrichment.api.internal.FieldType; import eu.europeana.enrichment.api.internal.ProxyFieldType; import eu.europeana.enrichment.api.internal.RecordParser; import eu.europeana.enrichment.api.internal.ReferenceTermContext; @@ -13,7 +14,9 @@ import eu.europeana.enrichment.rest.client.report.Report; import eu.europeana.enrichment.utils.EnrichmentUtils; import eu.europeana.enrichment.utils.EntityMergeEngine; +import eu.europeana.enrichment.utils.EntityType; import eu.europeana.enrichment.utils.RdfEntityUtils; +import eu.europeana.metis.schema.jibx.AboutType; import eu.europeana.metis.schema.jibx.ProxyType; import eu.europeana.metis.schema.jibx.RDF; import java.util.Arrays; @@ -214,7 +217,14 @@ private HashSet getSearchTermsReport(Set searchTerms, Map> enrichedValues) { HashSet reports = new HashSet<>(); for (SearchTermContext searchTerm : searchTerms) { - if (enrichedValues.get(searchTerm).isEmpty()) { + if (fieldTypeContainsOrganizationType(searchTerm.getFieldTypes()) + && enrichedValues.get(searchTerm).isEmpty()) { + reports.add(Report + .buildEnrichmentWarn() + .withMessage("Could not find an entity for the given search term with type Organization.") + .withValue(searchTerm.getTextValue()) + .build()); + } else if 
(enrichedValues.get(searchTerm).isEmpty()) { reports.add(Report .buildEnrichmentIgnore() .withMessage("Could not find an entity for the given search term.") @@ -239,4 +249,13 @@ private HashSet getSearchReferenceReport(Set refer } return reports; } + + private boolean fieldTypeContainsOrganizationType(Set> fieldTypes) { + for(FieldType fieldType : fieldTypes){ + if(fieldType.getEntityType().equals(EntityType.ORGANIZATION)) + return true; + } + + return false; + } } diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityConverterUtils.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityConverterUtils.java new file mode 100644 index 0000000000..16e0e820fb --- /dev/null +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityConverterUtils.java @@ -0,0 +1,244 @@ +package eu.europeana.enrichment.utils; + +import eu.europeana.enrichment.api.external.model.Agent; +import eu.europeana.enrichment.api.external.model.Concept; +import eu.europeana.enrichment.api.external.model.Organization; +import eu.europeana.enrichment.api.external.model.Part; +import eu.europeana.enrichment.api.external.model.Place; +import eu.europeana.enrichment.api.external.model.TimeSpan; +import eu.europeana.metis.schema.jibx.AgentType; +import eu.europeana.metis.schema.jibx.Alt; +import eu.europeana.metis.schema.jibx.AltLabel; +import eu.europeana.metis.schema.jibx.Begin; +import eu.europeana.metis.schema.jibx.BiographicalInformation; +import eu.europeana.metis.schema.jibx.BroadMatch; +import eu.europeana.metis.schema.jibx.Broader; +import eu.europeana.metis.schema.jibx.CloseMatch; +import eu.europeana.metis.schema.jibx.Concept.Choice; +import eu.europeana.metis.schema.jibx.Date; +import eu.europeana.metis.schema.jibx.DateOfBirth; +import eu.europeana.metis.schema.jibx.DateOfDeath; +import eu.europeana.metis.schema.jibx.DateOfEstablishment; +import 
eu.europeana.metis.schema.jibx.DateOfTermination; +import eu.europeana.metis.schema.jibx.End; +import eu.europeana.metis.schema.jibx.ExactMatch; +import eu.europeana.metis.schema.jibx.Gender; +import eu.europeana.metis.schema.jibx.HasMet; +import eu.europeana.metis.schema.jibx.HasPart; +import eu.europeana.metis.schema.jibx.HiddenLabel; +import eu.europeana.metis.schema.jibx.Identifier; +import eu.europeana.metis.schema.jibx.InScheme; +import eu.europeana.metis.schema.jibx.IsNextInSequence; +import eu.europeana.metis.schema.jibx.IsPartOf; +import eu.europeana.metis.schema.jibx.IsRelatedTo; +import eu.europeana.metis.schema.jibx.Lat; +import eu.europeana.metis.schema.jibx.NarrowMatch; +import eu.europeana.metis.schema.jibx.Narrower; +import eu.europeana.metis.schema.jibx.Notation; +import eu.europeana.metis.schema.jibx.Note; +import eu.europeana.metis.schema.jibx.PlaceOfBirth; +import eu.europeana.metis.schema.jibx.PlaceOfDeath; +import eu.europeana.metis.schema.jibx.PlaceType; +import eu.europeana.metis.schema.jibx.PrefLabel; +import eu.europeana.metis.schema.jibx.ProfessionOrOccupation; +import eu.europeana.metis.schema.jibx.Related; +import eu.europeana.metis.schema.jibx.RelatedMatch; +import eu.europeana.metis.schema.jibx.SameAs; +import eu.europeana.metis.schema.jibx.TimeSpanType; +import eu.europeana.metis.schema.jibx._Long; +import java.util.ArrayList; +import java.util.List; + +/** + * Contains methods for converting enrichment entity classes to jibx entity classes + */ +public final class EntityConverterUtils { + + private EntityConverterUtils() { + } + + static eu.europeana.metis.schema.jibx.Organization convertOrganization( + Organization organization) { + final eu.europeana.metis.schema.jibx.Organization organizationType = new eu.europeana.metis.schema.jibx.Organization(); + organizationType.setAbout(organization.getAbout()); + organizationType.setPrefLabelList( + ItemExtractorUtils.extractLabels(organization.getPrefLabelList(), PrefLabel::new)); + 
return organizationType; + } + + static TimeSpanType convertTimeSpan(TimeSpan timespan) { + + TimeSpanType timeSpanType = new TimeSpanType(); + + ItemExtractorUtils.setAbout(timespan, timeSpanType); + timeSpanType.setAltLabelList( + ItemExtractorUtils.extractLabels(timespan.getAltLabelList(), AltLabel::new)); + timeSpanType.setBegin(ItemExtractorUtils.extractLabel(timespan.getBegin(), Begin::new)); + timeSpanType.setEnd(ItemExtractorUtils.extractLabel(timespan.getEnd(), End::new)); + timeSpanType.setHasPartList( + ItemExtractorUtils.extractLabelResources(timespan.getHasPartsList(), HasPart::new)); + if (timespan.getIsNextInSequence() != null) { + timeSpanType.setIsNextInSequence(ItemExtractorUtils + .extractAsResource(timespan.getIsNextInSequence(), IsNextInSequence::new, + Part::getResource)); + } + if (timespan.getIsPartOf() != null) { + timeSpanType.setIsPartOfList( + ItemExtractorUtils.extractLabelResources(timespan.getIsPartOf(), IsPartOf::new)); + } + timeSpanType.setNoteList(ItemExtractorUtils.extractLabels(timespan.getNotes(), Note::new)); + timeSpanType.setPrefLabelList( + ItemExtractorUtils.extractLabels(timespan.getPrefLabelList(), PrefLabel::new)); + timeSpanType + .setSameAList(ItemExtractorUtils.extractResources(timespan.getSameAs(), SameAs::new)); + timeSpanType.setHiddenLabelList( + ItemExtractorUtils.extractLabels(timespan.getHiddenLabel(), HiddenLabel::new)); + + return timeSpanType; + } + + static eu.europeana.metis.schema.jibx.Concept convertConcept(Concept baseConcept) { + + eu.europeana.metis.schema.jibx.Concept concept = new eu.europeana.metis.schema.jibx.Concept(); + + ItemExtractorUtils.setAbout(baseConcept, concept); + final List choices = new ArrayList<>(); + + final List altLabels = ItemExtractorUtils + .extractLabels(baseConcept.getAltLabelList(), AltLabel::new); + ItemExtractorUtils.toChoices(altLabels, Choice::setAltLabel, choices); + + final List broadMatches = ItemExtractorUtils + .extractResources(baseConcept.getBroadMatch(), 
BroadMatch::new); + ItemExtractorUtils.toChoices(broadMatches, Choice::setBroadMatch, choices); + + final List broaders = ItemExtractorUtils + .extractResources(baseConcept.getBroader(), Broader::new); + ItemExtractorUtils.toChoices(broaders, Choice::setBroader, choices); + + final List closeMatches = ItemExtractorUtils + .extractResources(baseConcept.getCloseMatch(), CloseMatch::new); + ItemExtractorUtils.toChoices(closeMatches, Choice::setCloseMatch, choices); + + final List exactMatches = ItemExtractorUtils + .extractResources(baseConcept.getExactMatch(), ExactMatch::new); + ItemExtractorUtils.toChoices(exactMatches, Choice::setExactMatch, choices); + + final List inSchemes = ItemExtractorUtils + .extractResources(baseConcept.getInScheme(), InScheme::new); + ItemExtractorUtils.toChoices(inSchemes, Choice::setInScheme, choices); + + final List narrowers = ItemExtractorUtils + .extractResources(baseConcept.getNarrower(), Narrower::new); + ItemExtractorUtils.toChoices(narrowers, Choice::setNarrower, choices); + + final List narrowMatches = ItemExtractorUtils + .extractResources(baseConcept.getNarrowMatch(), NarrowMatch::new); + ItemExtractorUtils.toChoices(narrowMatches, Choice::setNarrowMatch, choices); + + final List notations = ItemExtractorUtils + .extractLabels(baseConcept.getNotation(), Notation::new); + ItemExtractorUtils.toChoices(notations, Choice::setNotation, choices); + + final List notes = ItemExtractorUtils.extractLabels(baseConcept.getNotes(), Note::new); + ItemExtractorUtils.toChoices(notes, Choice::setNote, choices); + + final List prefLabels = ItemExtractorUtils + .extractLabels(baseConcept.getPrefLabelList(), PrefLabel::new); + ItemExtractorUtils.toChoices(prefLabels, Choice::setPrefLabel, choices); + + final List relateds = ItemExtractorUtils + .extractResources(baseConcept.getRelated(), Related::new); + ItemExtractorUtils.toChoices(relateds, Choice::setRelated, choices); + + final List relatedMatches = ItemExtractorUtils + 
.extractResources(baseConcept.getRelatedMatch(), RelatedMatch::new); + ItemExtractorUtils.toChoices(relatedMatches, Choice::setRelatedMatch, choices); + + concept.setChoiceList(choices); + + return concept; + } + + static AgentType convertAgent(Agent agent) { + + AgentType agentType = new AgentType(); + + ItemExtractorUtils.setAbout(agent, agentType); + agentType.setAltLabelList(ItemExtractorUtils.extractLabels(agent.getAltLabelList(), AltLabel::new)); + agentType.setBegin(ItemExtractorUtils.extractFirstLabel(agent.getBeginList(), Begin::new)); + agentType.setBiographicalInformationList(ItemExtractorUtils.extractLabelResources(agent.getBiographicalInformation(), + BiographicalInformation::new)); + agentType.setProfessionOrOccupationList( + ItemExtractorUtils.extractLabelResources(agent.getProfessionOrOccupation(), ProfessionOrOccupation::new)); + agentType.setDateList(ItemExtractorUtils.extractLabelResources(agent.getDate(), Date::new)); + agentType.setPlaceOfBirthList( + ItemExtractorUtils.extractLabelResources(agent.getPlaceOfBirth(), PlaceOfBirth::new)); + agentType.setPlaceOfDeathList( + ItemExtractorUtils.extractLabelResources(agent.getPlaceOfDeath(), PlaceOfDeath::new)); + agentType.setDateOfBirth(ItemExtractorUtils.extractFirstLabel(agent.getDateOfBirth(), DateOfBirth::new)); + agentType.setDateOfDeath(ItemExtractorUtils.extractFirstLabel(agent.getDateOfDeath(), DateOfDeath::new)); + agentType.setDateOfEstablishment(ItemExtractorUtils + .extractFirstLabel(agent.getDateOfEstablishment(), DateOfEstablishment::new)); + agentType.setDateOfTermination( + ItemExtractorUtils.extractFirstLabel(agent.getDateOfTermination(), DateOfTermination::new)); + agentType.setEnd(ItemExtractorUtils.extractFirstLabel(agent.getEndList(), End::new)); + agentType.setGender(ItemExtractorUtils.extractFirstLabel(agent.getGender(), Gender::new)); + agentType.setHasMetList(ItemExtractorUtils.extractResources(agent.getHasMet(), HasMet::new)); + + // hasPartList: not available + + 
agentType.setIdentifierList( + ItemExtractorUtils.extractLabels(agent.getIdentifier(), Identifier::new)); + + // isPartOfList: not available + + agentType.setIsRelatedToList( + ItemExtractorUtils.extractLabelResources(agent.getIsRelatedTo(), IsRelatedTo::new)); + + // nameList: not available + + agentType.setNoteList(ItemExtractorUtils.extractLabels(agent.getNotes(), Note::new)); + agentType.setPrefLabelList( + ItemExtractorUtils.extractLabels(agent.getPrefLabelList(), PrefLabel::new)); + agentType.setProfessionOrOccupationList(ItemExtractorUtils + .extractLabelResources(agent.getProfessionOrOccupation(), ProfessionOrOccupation::new)); + agentType.setSameAList(ItemExtractorUtils.extractResources(agent.getSameAs(), SameAs::new)); + + return agentType; + } + + static PlaceType convertPlace(Place place) { + + PlaceType placeType = new PlaceType(); + + ItemExtractorUtils.setAbout(place, placeType); + if (place.getAlt() != null) { + Alt alt = new Alt(); + alt.setAlt(Float.valueOf(place.getAlt())); + placeType.setAlt(alt); + } + placeType.setAltLabelList(ItemExtractorUtils.extractLabels(place.getAltLabelList(), AltLabel::new)); + placeType.setHasPartList(ItemExtractorUtils.extractLabelResources(place.getHasPartsList(), HasPart::new)); + if (place.getIsPartOf() != null) { + placeType.setIsPartOfList( + ItemExtractorUtils.extractLabelResources(place.getIsPartOf(), IsPartOf::new)); + } + if (place.getLat() != null) { + Lat lat = new Lat(); + lat.setLat(Float.valueOf(place.getLat())); + placeType.setLat(lat); + } + if (place.getLon() != null) { + _Long longitude = new _Long(); + longitude.setLong(Float.valueOf(place.getLon())); + placeType.setLong(longitude); + } + placeType.setNoteList(ItemExtractorUtils.extractLabels(place.getNotes(), Note::new)); + placeType.setPrefLabelList(ItemExtractorUtils.extractLabels(place.getPrefLabelList(), PrefLabel::new)); + placeType.setSameAList(ItemExtractorUtils.extractResources(place.getSameAs(), SameAs::new)); + + // isNextInSequence: not 
available + return placeType; + } +} diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java index eb75667dd9..5dc2126d7d 100644 --- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/EntityMergeEngine.java @@ -8,7 +8,6 @@ import eu.europeana.enrichment.api.external.model.Concept; import eu.europeana.enrichment.api.external.model.EnrichmentBase; import eu.europeana.enrichment.api.external.model.Organization; -import eu.europeana.enrichment.api.external.model.Part; import eu.europeana.enrichment.api.external.model.Place; import eu.europeana.enrichment.api.external.model.TimeSpan; import eu.europeana.enrichment.api.internal.AggregationFieldType; @@ -19,48 +18,8 @@ import eu.europeana.enrichment.api.internal.SearchTermContext; import eu.europeana.enrichment.rest.client.dereference.DereferencedEntities; import eu.europeana.metis.schema.jibx.AboutType; -import eu.europeana.metis.schema.jibx.AgentType; import eu.europeana.metis.schema.jibx.Aggregation; -import eu.europeana.metis.schema.jibx.Alt; -import eu.europeana.metis.schema.jibx.AltLabel; -import eu.europeana.metis.schema.jibx.Begin; -import eu.europeana.metis.schema.jibx.BiographicalInformation; -import eu.europeana.metis.schema.jibx.BroadMatch; -import eu.europeana.metis.schema.jibx.Broader; -import eu.europeana.metis.schema.jibx.CloseMatch; -import eu.europeana.metis.schema.jibx.Concept.Choice; -import eu.europeana.metis.schema.jibx.Date; -import eu.europeana.metis.schema.jibx.DateOfBirth; -import eu.europeana.metis.schema.jibx.DateOfDeath; -import eu.europeana.metis.schema.jibx.DateOfEstablishment; -import eu.europeana.metis.schema.jibx.DateOfTermination; -import 
eu.europeana.metis.schema.jibx.End; -import eu.europeana.metis.schema.jibx.ExactMatch; -import eu.europeana.metis.schema.jibx.Gender; -import eu.europeana.metis.schema.jibx.HasMet; -import eu.europeana.metis.schema.jibx.HasPart; -import eu.europeana.metis.schema.jibx.HiddenLabel; -import eu.europeana.metis.schema.jibx.Identifier; -import eu.europeana.metis.schema.jibx.InScheme; -import eu.europeana.metis.schema.jibx.IsNextInSequence; -import eu.europeana.metis.schema.jibx.IsPartOf; -import eu.europeana.metis.schema.jibx.IsRelatedTo; -import eu.europeana.metis.schema.jibx.Lat; -import eu.europeana.metis.schema.jibx.NarrowMatch; -import eu.europeana.metis.schema.jibx.Narrower; -import eu.europeana.metis.schema.jibx.Notation; -import eu.europeana.metis.schema.jibx.Note; -import eu.europeana.metis.schema.jibx.PlaceOfBirth; -import eu.europeana.metis.schema.jibx.PlaceOfDeath; -import eu.europeana.metis.schema.jibx.PlaceType; -import eu.europeana.metis.schema.jibx.PrefLabel; -import eu.europeana.metis.schema.jibx.ProfessionOrOccupation; import eu.europeana.metis.schema.jibx.RDF; -import eu.europeana.metis.schema.jibx.Related; -import eu.europeana.metis.schema.jibx.RelatedMatch; -import eu.europeana.metis.schema.jibx.SameAs; -import eu.europeana.metis.schema.jibx.TimeSpanType; -import eu.europeana.metis.schema.jibx._Long; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -79,280 +38,6 @@ */ public class EntityMergeEngine { - private static PlaceType convertPlace(Place place) { - - PlaceType placeType = new PlaceType(); - - // about - ItemExtractorUtils.setAbout(place, placeType); - - // alt - if (place.getAlt() != null) { - Alt alt = new Alt(); - alt.setAlt(Float.valueOf(place.getAlt())); - placeType.setAlt(alt); - } - - // altlabels - placeType - .setAltLabelList(ItemExtractorUtils.extractLabels(place.getAltLabelList(), AltLabel::new)); - - // hasPartList - placeType.setHasPartList( - 
ItemExtractorUtils.extractLabelResources(place.getHasPartsList(), HasPart::new)); - - // isPartOf - if (place.getIsPartOf() != null) { - placeType.setIsPartOfList( - ItemExtractorUtils.extractLabelResources(place.getIsPartOf(), IsPartOf::new)); - } - - // lat - if (place.getLat() != null) { - Lat lat = new Lat(); - lat.setLat(Float.valueOf(place.getLat())); - placeType.setLat(lat); - } - - // _long - if (place.getLon() != null) { - _Long longitude = new _Long(); - longitude.setLong(Float.valueOf(place.getLon())); - placeType.setLong(longitude); - } - - // noteList - placeType.setNoteList(ItemExtractorUtils.extractLabels(place.getNotes(), Note::new)); - - // prefLabelList - placeType.setPrefLabelList( - ItemExtractorUtils.extractLabels(place.getPrefLabelList(), PrefLabel::new)); - - // sameAsList - placeType.setSameAList(ItemExtractorUtils.extractResources(place.getSameAs(), SameAs::new)); - - // isNextInSequence: not available - - // Done - return placeType; - } - - private static AgentType convertAgent(Agent agent) { - - AgentType agentType = new AgentType(); - - // about - ItemExtractorUtils.setAbout(agent, agentType); - - // altLabelList - agentType - .setAltLabelList(ItemExtractorUtils.extractLabels(agent.getAltLabelList(), AltLabel::new)); - - // begin - agentType.setBegin(ItemExtractorUtils.extractFirstLabel(agent.getBeginList(), Begin::new)); - - // biographicalInformation - agentType.setBiographicalInformationList(ItemExtractorUtils - .extractLabelResources(agent.getBiographicalInformation(), - BiographicalInformation::new)); - agentType.setProfessionOrOccupationList(ItemExtractorUtils - .extractLabelResources(agent.getProfessionOrOccupation(), ProfessionOrOccupation::new)); - - // dateList - agentType.setDateList(ItemExtractorUtils.extractLabelResources(agent.getDate(), Date::new)); - - // placeOfBirth - agentType.setPlaceOfBirthList( - ItemExtractorUtils.extractLabelResources(agent.getPlaceOfBirth(), PlaceOfBirth::new)); - - // placeOfDeath - 
agentType.setPlaceOfDeathList( - ItemExtractorUtils.extractLabelResources(agent.getPlaceOfDeath(), PlaceOfDeath::new)); - - // dateOfBirth - agentType.setDateOfBirth( - ItemExtractorUtils.extractFirstLabel(agent.getDateOfBirth(), DateOfBirth::new)); - - // dateofDeath - agentType.setDateOfDeath( - ItemExtractorUtils.extractFirstLabel(agent.getDateOfDeath(), DateOfDeath::new)); - - // dateOfEstablishment - agentType.setDateOfEstablishment(ItemExtractorUtils - .extractFirstLabel(agent.getDateOfEstablishment(), DateOfEstablishment::new)); - - // dateofTermination - agentType.setDateOfTermination( - ItemExtractorUtils.extractFirstLabel(agent.getDateOfTermination(), DateOfTermination::new)); - - // end - agentType.setEnd(ItemExtractorUtils.extractFirstLabel(agent.getEndList(), End::new)); - - // gender - agentType.setGender(ItemExtractorUtils.extractFirstLabel(agent.getGender(), Gender::new)); - - // hasMetList - agentType.setHasMetList(ItemExtractorUtils.extractResources(agent.getHasMet(), HasMet::new)); - - // hasPartList: not available - - // identifierList - agentType.setIdentifierList( - ItemExtractorUtils.extractLabels(agent.getIdentifier(), Identifier::new)); - - // isPartOfList: not available - - // isRelatedToList - agentType.setIsRelatedToList( - ItemExtractorUtils.extractLabelResources(agent.getIsRelatedTo(), IsRelatedTo::new)); - - // nameList: not available - - // noteList - agentType.setNoteList(ItemExtractorUtils.extractLabels(agent.getNotes(), Note::new)); - - // prefLabelList - agentType.setPrefLabelList( - ItemExtractorUtils.extractLabels(agent.getPrefLabelList(), PrefLabel::new)); - - // professionOrOccupationList - agentType.setProfessionOrOccupationList(ItemExtractorUtils - .extractLabelResources(agent.getProfessionOrOccupation(), ProfessionOrOccupation::new)); - - // sameAsList - agentType.setSameAList(ItemExtractorUtils.extractResources(agent.getSameAs(), SameAs::new)); - - return agentType; - } - - private static 
eu.europeana.metis.schema.jibx.Concept convertConcept(Concept baseConcept) { - - eu.europeana.metis.schema.jibx.Concept concept = new eu.europeana.metis.schema.jibx.Concept(); - - // about - ItemExtractorUtils.setAbout(baseConcept, concept); - - // choiceList - final List choices = new ArrayList<>(); - - final List altLabels = ItemExtractorUtils - .extractLabels(baseConcept.getAltLabelList(), AltLabel::new); - ItemExtractorUtils.toChoices(altLabels, Choice::setAltLabel, choices); - - final List broadMatches = ItemExtractorUtils - .extractResources(baseConcept.getBroadMatch(), BroadMatch::new); - ItemExtractorUtils.toChoices(broadMatches, Choice::setBroadMatch, choices); - - final List broaders = ItemExtractorUtils - .extractResources(baseConcept.getBroader(), Broader::new); - ItemExtractorUtils.toChoices(broaders, Choice::setBroader, choices); - - final List closeMatches = ItemExtractorUtils - .extractResources(baseConcept.getCloseMatch(), CloseMatch::new); - ItemExtractorUtils.toChoices(closeMatches, Choice::setCloseMatch, choices); - - final List exactMatches = ItemExtractorUtils - .extractResources(baseConcept.getExactMatch(), ExactMatch::new); - ItemExtractorUtils.toChoices(exactMatches, Choice::setExactMatch, choices); - - final List inSchemes = ItemExtractorUtils - .extractResources(baseConcept.getInScheme(), InScheme::new); - ItemExtractorUtils.toChoices(inSchemes, Choice::setInScheme, choices); - - final List narrowers = ItemExtractorUtils - .extractResources(baseConcept.getNarrower(), Narrower::new); - ItemExtractorUtils.toChoices(narrowers, Choice::setNarrower, choices); - - final List narrowMatches = ItemExtractorUtils - .extractResources(baseConcept.getNarrowMatch(), NarrowMatch::new); - ItemExtractorUtils.toChoices(narrowMatches, Choice::setNarrowMatch, choices); - - final List notations = ItemExtractorUtils - .extractLabels(baseConcept.getNotation(), Notation::new); - ItemExtractorUtils.toChoices(notations, Choice::setNotation, choices); - - final 
List notes = ItemExtractorUtils.extractLabels(baseConcept.getNotes(), Note::new); - ItemExtractorUtils.toChoices(notes, Choice::setNote, choices); - - final List prefLabels = ItemExtractorUtils - .extractLabels(baseConcept.getPrefLabelList(), PrefLabel::new); - ItemExtractorUtils.toChoices(prefLabels, Choice::setPrefLabel, choices); - - final List relateds = ItemExtractorUtils - .extractResources(baseConcept.getRelated(), Related::new); - ItemExtractorUtils.toChoices(relateds, Choice::setRelated, choices); - - final List relatedMatches = ItemExtractorUtils - .extractResources(baseConcept.getRelatedMatch(), RelatedMatch::new); - ItemExtractorUtils.toChoices(relatedMatches, Choice::setRelatedMatch, choices); - - concept.setChoiceList(choices); - - return concept; - } - - private static TimeSpanType convertTimeSpan(TimeSpan timespan) { - - TimeSpanType timeSpanType = new TimeSpanType(); - - // about - ItemExtractorUtils.setAbout(timespan, timeSpanType); - - // altLabelList - timeSpanType.setAltLabelList( - ItemExtractorUtils.extractLabels(timespan.getAltLabelList(), AltLabel::new)); - - // begin - timeSpanType.setBegin(ItemExtractorUtils.extractLabel(timespan.getBegin(), Begin::new)); - - // end - timeSpanType.setEnd(ItemExtractorUtils.extractLabel(timespan.getEnd(), End::new)); - - // hasPartList - timeSpanType.setHasPartList( - ItemExtractorUtils.extractLabelResources(timespan.getHasPartsList(), HasPart::new)); - - // isNextInSequence - if (timespan.getIsNextInSequence() != null) { - timeSpanType.setIsNextInSequence(ItemExtractorUtils - .extractAsResource(timespan.getIsNextInSequence(), IsNextInSequence::new, - Part::getResource)); - } - - // isPartOf - if (timespan.getIsPartOf() != null) { - timeSpanType.setIsPartOfList( - ItemExtractorUtils.extractLabelResources(timespan.getIsPartOf(), IsPartOf::new)); - } - - // noteList - timeSpanType.setNoteList(ItemExtractorUtils.extractLabels(timespan.getNotes(), Note::new)); - - // prefLabelList - 
timeSpanType.setPrefLabelList( - ItemExtractorUtils.extractLabels(timespan.getPrefLabelList(), PrefLabel::new)); - - // sameAsList - timeSpanType - .setSameAList(ItemExtractorUtils.extractResources(timespan.getSameAs(), SameAs::new)); - - // hiddenLabelList - timeSpanType.setHiddenLabelList( - ItemExtractorUtils.extractLabels(timespan.getHiddenLabel(), HiddenLabel::new)); - - // done - return timeSpanType; - } - - private static eu.europeana.metis.schema.jibx.Organization convertOrganization( - Organization organization) { - final eu.europeana.metis.schema.jibx.Organization organizationType = new eu.europeana.metis.schema.jibx.Organization(); - organizationType.setAbout(organization.getAbout()); - organizationType.setPrefLabelList( - ItemExtractorUtils.extractLabels(organization.getPrefLabelList(), PrefLabel::new)); - return organizationType; - } - private static T convertAndAddEntity( I inputEntity, Function converter, Supplier> listGetter, Consumer> listSetter) { @@ -375,27 +60,25 @@ private static T convertAndAddEn return convertedEntity; } + /** + * Converts a given enrichment entity to jibx entity and adds it to the provided rdf. + * @param rdf the rdf + * @param enrichmentBase the enrichment entity + * @return the converted entity + */ public static AboutType convertAndAddEntity(RDF rdf, EnrichmentBase enrichmentBase) { - - // Convert the entity and add it to the RDF. 
- final AboutType entity; - if (enrichmentBase instanceof Place place) { - entity = convertAndAddEntity(place, EntityMergeEngine::convertPlace, rdf::getPlaceList, rdf::setPlaceList); - } else if (enrichmentBase instanceof Agent agent) { - entity = convertAndAddEntity(agent, EntityMergeEngine::convertAgent, rdf::getAgentList, rdf::setAgentList); - } else if (enrichmentBase instanceof Concept concept) { - entity = convertAndAddEntity(concept, EntityMergeEngine::convertConcept, rdf::getConceptList, rdf::setConceptList); - } else if (enrichmentBase instanceof TimeSpan timeSpan) { - entity = convertAndAddEntity(timeSpan, EntityMergeEngine::convertTimeSpan, rdf::getTimeSpanList, rdf::setTimeSpanList); - } else if (enrichmentBase instanceof Organization organization) { - entity = convertAndAddEntity(organization, - EntityMergeEngine::convertOrganization, rdf::getOrganizationList, + return switch (enrichmentBase) { + case Place place -> convertAndAddEntity(place, EntityConverterUtils::convertPlace, rdf::getPlaceList, rdf::setPlaceList); + case Agent agent -> convertAndAddEntity(agent, EntityConverterUtils::convertAgent, rdf::getAgentList, rdf::setAgentList); + case Concept concept -> + convertAndAddEntity(concept, EntityConverterUtils::convertConcept, rdf::getConceptList, rdf::setConceptList); + case TimeSpan timeSpan -> + convertAndAddEntity(timeSpan, EntityConverterUtils::convertTimeSpan, rdf::getTimeSpanList, rdf::setTimeSpanList); + case Organization organization -> convertAndAddEntity(organization, + EntityConverterUtils::convertOrganization, rdf::getOrganizationList, rdf::setOrganizationList); - } else { - throw new IllegalArgumentException("Unknown entity type: " + enrichmentBase.getClass()); - } - - return entity; + case null, default -> throw new IllegalArgumentException("Unknown entity type: " + enrichmentBase.getClass()); + }; } /** diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java 
b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java index 39a98fbe08..aca31df1b3 100644 --- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/RdfEntityUtils.java @@ -105,7 +105,7 @@ public static void replaceResourceWithLinkInAggregation(RDF rdf, List referenceTerm.getReference().toString())) .forEach(resourceOrLiteralType -> { final Resource resource = new Resource(); - resource.setResource(listOfAboutTypes.get(0).getAbout()); + resource.setResource(listOfAboutTypes.getFirst().getAbout()); resourceOrLiteralType.setResource(resource); }); } diff --git a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/YearParser.java b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/YearParser.java index 3534cf90af..2b842f9a86 100644 --- a/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/YearParser.java +++ b/metis-enrichment/metis-enrichment-client/src/main/java/eu/europeana/enrichment/utils/YearParser.java @@ -1,6 +1,6 @@ package eu.europeana.enrichment.utils; -import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.List; @@ -71,7 +71,7 @@ private static List createEraPatterns(String filePath) { readLines = IOUtils .readLines(Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath), StandardCharsets.UTF_8.name()); - } catch (IOException e) { + } catch (UncheckedIOException e) { LOGGER.error("Problem reading file '" + filePath + "'", e); return Collections.emptyList(); } diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java 
b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java index 8bcce9fe78..819c717efe 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/EnrichmentWorkerImplTest.java @@ -1,5 +1,21 @@ package eu.europeana.enrichment.rest.client; +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + import com.github.tomakehurst.wiremock.WireMockServer; import com.github.tomakehurst.wiremock.common.ConsoleNotifier; import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer; @@ -18,6 +34,10 @@ import eu.europeana.enrichment.rest.client.report.Type; import eu.europeana.metis.schema.convert.SerializationException; import eu.europeana.metis.schema.jibx.RDF; +import java.io.IOException; +import java.util.Set; +import java.util.TreeSet; +import java.util.stream.Stream; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -28,27 +48,6 @@ import org.slf4j.LoggerFactory; import 
org.springframework.http.HttpStatus; -import java.io.IOException; -import java.util.Set; -import java.util.TreeSet; -import java.util.stream.Stream; - -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - class EnrichmentWorkerImplTest { private static final Logger LOGGER = LoggerFactory.getLogger(EnrichmentWorkerImplTest.class); @@ -355,7 +354,7 @@ void testEnrichmentWorkerHappyFlow(String inputRecord, RecordStatus recordStatus } @Test - void testEnrichmentWorkerHappyFlow() throws DereferenceException, EnrichmentException { + void testEnrichmentWorkerHappyFlow() { TreeSet modeSetWithOnlyEnrichment = new TreeSet<>(); TreeSet modeSetWithOnlyDereference = new TreeSet<>(); TreeSet modeSetWithBoth = new TreeSet<>(); @@ -370,7 +369,7 @@ void testEnrichmentWorkerHappyFlow() throws DereferenceException, EnrichmentExce } @Test - void testEnrichmentWorkerNullFlow() throws DereferenceException, EnrichmentException { + void testEnrichmentWorkerNullFlow() { TreeSet modeSetWithOnlyEnrichment = new TreeSet<>(); TreeSet modeSetWithOnlyDereference = new TreeSet<>(); TreeSet modeSetWithBoth = new TreeSet<>(); @@ -384,8 +383,7 @@ void testEnrichmentWorkerNullFlow() throws DereferenceException, 
EnrichmentExcep testEnrichmentWorkerNullFlow(modeSetWithBoth); } - private void testEnrichmentWorkerHappyFlow(Set modes) - throws DereferenceException, EnrichmentException { + private void testEnrichmentWorkerHappyFlow(Set modes) { // Create enricher and mock it. final Enricher enricher = mock(EnricherImpl.class); @@ -404,11 +402,9 @@ private void testEnrichmentWorkerHappyFlow(Set modes) // Check the performed tasks verifyDereferencingHappyFlow(doDereferencing, dereferencer, inputRdf); verifyEnrichmentHappyFlow(doEnrichment, enricher, inputRdf); - // verifyMergeHappyFlow(doEnrichment, doDereferencing, entityMergeEngine); } - private void testEnrichmentWorkerNullFlow(Set modes) - throws DereferenceException, EnrichmentException { + private void testEnrichmentWorkerNullFlow(Set modes) { // Create enrichment worker and mock the enrichment and dereferencing results. final Enricher enricher = mock(EnricherImpl.class); @@ -432,8 +428,7 @@ private void testEnrichmentWorkerNullFlow(Set modes) } // Verify dereference related calls - private void verifyDereferencingHappyFlow(boolean doDereferencing, Dereferencer dereferencer, - RDF inputRdf) throws DereferenceException { + private void verifyDereferencingHappyFlow(boolean doDereferencing, Dereferencer dereferencer, RDF inputRdf) { if (doDereferencing) { verify(dereferencer, times(1)).dereference(inputRdf); @@ -442,8 +437,7 @@ private void verifyDereferencingHappyFlow(boolean doDereferencing, Dereferencer } } - private void verifyDereferencingNullFlow(boolean doDereferencing, Dereferencer dereferencer, - RDF inputRdf) throws DereferenceException { + private void verifyDereferencingNullFlow(boolean doDereferencing, Dereferencer dereferencer, RDF inputRdf) { if (doDereferencing) { verify(dereferencer, times(1)).dereference(inputRdf); @@ -454,8 +448,7 @@ private void verifyDereferencingNullFlow(boolean doDereferencing, Dereferencer d } // Verify enrichment related calls - private void verifyEnrichmentHappyFlow(boolean 
doEnrichment, Enricher enricher, - RDF inputRdf) throws EnrichmentException { + private void verifyEnrichmentHappyFlow(boolean doEnrichment, Enricher enricher, RDF inputRdf) { if (doEnrichment) { verify(enricher, times(1)).enrichment(inputRdf); @@ -464,8 +457,7 @@ private void verifyEnrichmentHappyFlow(boolean doEnrichment, Enricher enricher, } } - private void verifyEnrichmentNullFlow(boolean doEnrichment, Enricher worker, RDF inputRdf) - throws EnrichmentException { + private void verifyEnrichmentNullFlow(boolean doEnrichment, Enricher worker, RDF inputRdf) { if (doEnrichment) { verify(worker, times(1)).enrichment(inputRdf); diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferenceClientTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferenceClientTest.java index 29f976970d..3a2913a19a 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferenceClientTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferenceClientTest.java @@ -61,10 +61,10 @@ void testDereference() { eq(EnrichmentResultList.class)); assertEquals( - res.getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0).getAbout(), + res.getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().get(0).getAbout(), agent1.getAbout()); assertEquals( - res.getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(1).getAbout(), + res.getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().get(1).getAbout(), agent2.getAbout()); } } diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java 
b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java index 303de82b52..32ad41bf07 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/dereference/DereferencerImplTest.java @@ -1,5 +1,23 @@ package eu.europeana.enrichment.rest.client.dereference; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anySet; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + import com.github.tomakehurst.wiremock.WireMockServer; import com.github.tomakehurst.wiremock.common.ConsoleNotifier; import com.github.tomakehurst.wiremock.http.JvmProxyConfigurer; @@ -23,26 +41,12 @@ import eu.europeana.metis.schema.jibx.Concept; import eu.europeana.metis.schema.jibx.PlaceType; import eu.europeana.metis.schema.jibx.RDF; -import java.net.URI; -import java.net.URISyntaxException; -import org.apache.commons.collections.CollectionUtils; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; 
-import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.mockito.ArgumentCaptor; -import org.springframework.http.HttpStatus; -import org.springframework.web.client.HttpClientErrorException; -import org.springframework.web.client.HttpServerErrorException; - import jakarta.ws.rs.NotFoundException; import jakarta.ws.rs.ServiceUnavailableException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; -import java.net.URL; +import java.net.URI; +import java.net.URISyntaxException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; @@ -55,24 +59,18 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.ok; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anySet; -import static org.mockito.Mockito.anyString; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; +import org.apache.commons.collections.CollectionUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.ArgumentCaptor; +import org.springframework.http.HttpStatus; +import org.springframework.web.client.HttpClientErrorException; +import org.springframework.web.client.HttpServerErrorException; /** * Unit tests for {@link DereferencerImpl} class @@ -166,7 +164,7 @@ void testDereferencerHappyFlow() throws MalformedURLException, URISyntaxExceptio final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class); doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet()); final DereferenceClient dereferenceClient = mock(DereferenceClient.class); - doReturn(DEREFERENCE_RESULT.get(0), + doReturn(DEREFERENCE_RESULT.getFirst(), DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient) .dereference(any()); final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class); @@ -218,7 +216,7 @@ void testDereferenceInvalidUrl() throws MalformedURLException, URISyntaxExceptio final ClientEntityResolver clientEntityResolver = mock(ClientEntityResolver.class); doReturn(ENRICHMENT_RESULT).when(clientEntityResolver).resolveByText(anySet()); final DereferenceClient dereferenceClient = mock(DereferenceClient.class); - doReturn(DEREFERENCE_RESULT.get(0), + doReturn(DEREFERENCE_RESULT.getFirst(), DEREFERENCE_RESULT.subList(1, DEREFERENCE_RESULT.size()).toArray()).when(dereferenceClient) .dereference(any()); final EntityMergeEngine entityMergeEngine = mock(EntityMergeEngine.class); @@ -383,9 +381,9 @@ private void verifyMergeNullFlow(EntityMergeEngine entityMergeEngine) { verify(entityMergeEngine, times(1)) .mergeReferenceEntitiesFromDereferencedEntities(any(), argumentCaptor.capture()); assertEquals(1, argumentCaptor.getValue().size()); - assertNull(argumentCaptor.getValue().get(0).getClassType()); - 
assertTrue(argumentCaptor.getValue().get(0).getReferenceTermListMap().isEmpty()); - assertTrue(argumentCaptor.getValue().get(0).getReportMessages().isEmpty()); + assertNull(argumentCaptor.getValue().getFirst().getClassType()); + assertTrue(argumentCaptor.getValue().getFirst().getReferenceTermListMap().isEmpty()); + assertTrue(argumentCaptor.getValue().getFirst().getReportMessages().isEmpty()); } private List prepareExpectedList() throws MalformedURLException, URISyntaxException { @@ -404,9 +402,9 @@ private List prepareExpectedList() throws MalformedURLExce ReferenceTermImpl expectedReferenceTerm2 = new ReferenceTermImpl(new URI("http://valid-example.host/place").toURL()); List expectedEnrichmentBaseList2 = new ArrayList<>(); expectedEnrichmentBaseList2.add( - DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0)); + DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().getFirst()); expectedEnrichmentBaseList2.add( - DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(1)); + DEREFERENCE_RESULT.get(2).getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().get(1)); expectedEnrichmentBaseList2.add(null); DereferencedEntities expectedDereferencedEntities2 = new DereferencedEntities( Map.of(expectedReferenceTerm2, expectedEnrichmentBaseList2), @@ -415,10 +413,10 @@ private List prepareExpectedList() throws MalformedURLExce ReferenceTermImpl expectedReferenceTerm3 = new ReferenceTermImpl(new URI("http://valid-example.host/about").toURL()); List expectedEnrichmentBaseList3 = new ArrayList<>(); expectedEnrichmentBaseList3.add( - DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(0)); + DEREFERENCE_RESULT.getFirst().getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().get(0)); expectedEnrichmentBaseList3.add(null); expectedEnrichmentBaseList3.add( - 
DEREFERENCE_RESULT.get(0).getEnrichmentBaseResultWrapperList().get(0).getEnrichmentBaseList().get(2)); + DEREFERENCE_RESULT.getFirst().getEnrichmentBaseResultWrapperList().getFirst().getEnrichmentBaseList().get(2)); DereferencedEntities expectedDereferencedEntities3 = new DereferencedEntities( Map.of(expectedReferenceTerm3, expectedEnrichmentBaseList3), Collections.emptySet(), AboutType.class); diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImplTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImplTest.java index 3557fcf49d..8ed5aea16f 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImplTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/EnricherImplTest.java @@ -5,6 +5,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anySet; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; @@ -12,10 +13,12 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import eu.europeana.enrichment.api.external.impl.ClientEntityResolver; import eu.europeana.enrichment.api.external.model.EnrichmentBase; import eu.europeana.enrichment.api.external.model.Place; +import eu.europeana.enrichment.api.internal.AggregationFieldType; import eu.europeana.enrichment.api.internal.ProxyFieldType; import eu.europeana.enrichment.api.internal.RecordParser; import eu.europeana.enrichment.api.internal.ReferenceTermContext; @@ -205,6 +208,29 @@ void 
testEnrichReferenceWarnFlow() throws MalformedURLException, URISyntaxExcept assertEquals(getExpectedReportMessagesWarning2Flow(), enrichReferences.getRight()); } + @Test + void testEnrichReferenceWarnFlowForOrganization() { + // Given the mocks + final RecordParser recordParser = Mockito.mock(RecordParser.class); + final ClientEntityResolver entityResolver = Mockito.mock(ClientEntityResolver.class); + final EntityMergeEngine entityMergeEngine = Mockito.mock(EntityMergeEngine.class); + + // When the enricher a 301 + final Enricher enricher = spy(new EnricherImpl(recordParser, entityResolver, entityMergeEngine)); + + SearchTermContext searchTermContext = new SearchTermContext("organization1", "language1", Set.of( + AggregationFieldType.DATA_PROVIDER)); + + when(entityResolver.resolveByText(anySet())).thenReturn(Map.of(searchTermContext, Collections.emptyList())); + + Pair>, Set> enrichReferences = enricher.enrichValues( + Set.of(searchTermContext)); + + // Then verify + assertNotNull(enrichReferences); + assertEquals(getExpectedReportMessagesWarningForOrganization(), enrichReferences.getRight()); + } + @Test void testEnrichReferenceExceptionFlow() throws MalformedURLException, URISyntaxException { // Given the mocks @@ -391,6 +417,16 @@ private HashSet getExpectedReportMessagesWarning1Flow() { return reports; } + private HashSet getExpectedReportMessagesWarningForOrganization() { + HashSet reports = new HashSet<>(); + reports.add(Report + .buildEnrichmentWarn() + .withMessage("Could not find an entity for the given search term with type Organization.") + .withValue("organization1") + .build()); + return reports; + } + private HashSet getExpectedReportMessagesError1Flow() { HashSet reports = new HashSet<>(); reports.add(Report diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParserTest.java 
b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParserTest.java index 6cf4939871..2c26b5d839 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParserTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/rest/client/enrichment/MetisRecordParserTest.java @@ -30,12 +30,12 @@ import org.apache.commons.io.IOUtils; import org.junit.jupiter.api.Test; -public class MetisRecordParserTest { +class MetisRecordParserTest { private static final RdfConversionUtils rdfConversionUtils = new RdfConversionUtils(); @Test - public void testExtractedFieldValuesForEnrichment() { + void testExtractedFieldValuesForEnrichment() { RDF rdf = new RDF(); ProxyType proxy = new ProxyType(); ArrayList choiceList = new ArrayList<>(); @@ -178,7 +178,7 @@ public void testExtractedFieldValuesForEnrichment() { } @Test - public void testSetAdditionalData() throws Exception { + void testSetAdditionalData() throws Exception { String xml = IOUtils .toString(getClass().getClassLoader().getResourceAsStream("sample_completeness.rdf"), StandardCharsets.UTF_8); RDF rdf = rdfConversionUtils.convertStringToRdf(xml); diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EnrichmentUtilsTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EnrichmentUtilsTest.java index 1d2b8e1fea..b362ef2c66 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EnrichmentUtilsTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EnrichmentUtilsTest.java @@ -20,18 +20,16 @@ import eu.europeana.metis.schema.jibx._Object; import java.util.ArrayList; import java.util.Collections; - import java.util.List; -import java.util.stream.Collectors; import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -public class EnrichmentUtilsTest { +class EnrichmentUtilsTest { - private final static RDF TEST_RDF = spy(new RDF()); - private final static ProxyType PROXY_EUROPEANA = new ProxyType(); - private final static ProxyType PROXY_PROVIDER = new ProxyType(); - private final static EuropeanaAggregationType EUROPEANA_AGGREGATION_TYPE = new EuropeanaAggregationType(); + private static final RDF TEST_RDF = spy(new RDF()); + private static final ProxyType PROXY_EUROPEANA = new ProxyType(); + private static final ProxyType PROXY_PROVIDER = new ProxyType(); + private static final EuropeanaAggregationType EUROPEANA_AGGREGATION_TYPE = new EuropeanaAggregationType(); @BeforeEach void setUp() { @@ -95,8 +93,7 @@ void testSetAdditionalDataAllYearFieldValues(){ EnrichmentUtils.setAdditionalData(TEST_RDF); ProxyType proxyResult = TEST_RDF.getProxyList().stream() - .filter(x -> x.getEuropeanaProxy().isEuropeanaProxy()) - .collect(Collectors.toList()).get(0); + .filter(x -> x.getEuropeanaProxy().isEuropeanaProxy()).toList().getFirst(); assertEquals(4, proxyResult.getYearList().size()); assertEquals("1990", proxyResult.getYearList().get(0).getString()); @@ -135,10 +132,10 @@ void testSetAdditionalDataYearFieldWithoutDuplicates(){ ProxyType proxyResult = TEST_RDF.getProxyList().stream() .filter(x -> x.getEuropeanaProxy().isEuropeanaProxy()) - .collect(Collectors.toList()).get(0); + .toList().getFirst(); assertEquals(1, proxyResult.getYearList().size()); - assertEquals("1990", proxyResult.getYearList().get(0).getString()); + assertEquals("1990", proxyResult.getYearList().getFirst().getString()); } @@ -153,7 +150,7 @@ void testSetAdditionalDataCompletenessNone(){ EnrichmentUtils.setAdditionalData(TEST_RDF); - EuropeanaAggregationType aggregationTypeResult = TEST_RDF.getEuropeanaAggregationList().get(0); + EuropeanaAggregationType aggregationTypeResult = TEST_RDF.getEuropeanaAggregationList().getFirst(); assertEquals("0", 
aggregationTypeResult.getCompleteness().getString()); } @@ -192,7 +189,7 @@ void testSetAdditionalDataCompletenessMoreThanZero() { EnrichmentUtils.setAdditionalData(TEST_RDF); - EuropeanaAggregationType aggregationTypeResult = TEST_RDF.getEuropeanaAggregationList().get(0); + EuropeanaAggregationType aggregationTypeResult = TEST_RDF.getEuropeanaAggregationList().getFirst(); assertTrue(Integer.parseInt(aggregationTypeResult.getCompleteness().getString()) > 0); } @@ -217,6 +214,6 @@ void testSetAdditionalDataEmptyProxies() { EnrichmentUtils.setAdditionalData(newRdf); assertArrayEquals(toCompare.getProxyList().toArray(), newRdf.getProxyList().toArray()); - assertEquals(toCompare.getEuropeanaAggregationList().get(0), newRdf.getEuropeanaAggregationList().get(0)); + assertEquals(toCompare.getEuropeanaAggregationList().getFirst(), newRdf.getEuropeanaAggregationList().getFirst()); } } diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EntityMergeEngineTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EntityMergeEngineTest.java index e90336d713..70547346ca 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EntityMergeEngineTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/EntityMergeEngineTest.java @@ -622,12 +622,12 @@ void testMergeOtherTypes() throws SerializationException { verifyRdf(rdf, 2, 2, 0, 1, 1); // Verify content - verifyAgent((Agent) inputList.get(0), rdf.getAgentList().get(0)); - verifyConcept((Concept) inputList.get(1), rdf.getConceptList().get(0)); - verifyTimespan((TimeSpan) inputList.get(2), rdf.getTimeSpanList().get(0)); + verifyAgent((Agent) inputList.get(0), rdf.getAgentList().getFirst()); + verifyConcept((Concept) inputList.get(1), rdf.getConceptList().getFirst()); + verifyTimespan((TimeSpan) inputList.get(2), rdf.getTimeSpanList().getFirst()); verifyAgent((Agent) 
inputList.get(3), rdf.getAgentList().get(1)); verifyConcept((Concept) inputList.get(4), rdf.getConceptList().get(1)); - verifyOrganization((Organization) inputList.get(5), rdf.getOrganizationList().get(0)); + verifyOrganization((Organization) inputList.get(5), rdf.getOrganizationList().getFirst()); // Convert RDF to string as extra test that everything is OK. rdfConversionUtils.convertRdfToString(rdf); diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/ItemExtractorUtilsTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/ItemExtractorUtilsTest.java index 4ca1c8cd7e..9db9add02c 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/ItemExtractorUtilsTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/ItemExtractorUtilsTest.java @@ -1,6 +1,7 @@ package eu.europeana.enrichment.utils; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -20,10 +21,9 @@ import eu.europeana.metis.schema.jibx.ResourceType; import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; import org.junit.jupiter.api.Test; -public class ItemExtractorUtilsTest { +class ItemExtractorUtilsTest { @Test void testSetAbout() { @@ -74,11 +74,10 @@ void testExtractLabels() { List output = ItemExtractorUtils.extractLabels(labels, LiteralType::new); for (Label label : labels) { - List result = output.stream().filter(x -> x.getString().equals(label.getValue())).collect( - Collectors.toList()); + List result = output.stream().filter(x -> x.getString().equals(label.getValue())).toList(); assertEquals(1, result.size()); - assertEquals(label.getLang(), 
result.get(0).getLang().getLang()); + assertEquals(label.getLang(), result.getFirst().getLang().getLang()); } } @@ -188,16 +187,6 @@ void testExtractLabelReturnEmpty() { assertNull(output.getLang()); } - @Test - void testExtractLabelToResourceOrLiteralEmpty() { - Label label = new Label(null); - LiteralType output = ItemExtractorUtils.extractLabel(label, LiteralType::new); - - assertNotNull(output); - assertEquals("", output.getString()); - assertNull(output.getLang()); - } - @Test void testExtractLabelResource() { LabelResource label = new LabelResource("lang1", "value1"); @@ -327,14 +316,12 @@ void testToChoices() { altLabels.add(label3); ItemExtractorUtils.toChoices(altLabels, Choice::setAltLabel, choices); - - assertTrue(choices.size() > 0); + + assertFalse(choices.isEmpty()); for (AltLabel label : altLabels) { - List result = choices.stream().filter(x -> x.getAltLabel().equals(label)).collect( - Collectors.toList()); - - assertTrue(result.size() > 0); + List result = choices.stream().filter(x -> x.getAltLabel().equals(label)).toList(); + assertFalse(result.isEmpty()); } } } diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/RdfEntityUtilsTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/RdfEntityUtilsTest.java index b111925239..b38d6f0484 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/RdfEntityUtilsTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/RdfEntityUtilsTest.java @@ -25,40 +25,40 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -public class RdfEntityUtilsTest { +class RdfEntityUtilsTest { - private static RDF TEST_RDF; - private static ProxyType PROXY_EUROPEANA; + private RDF testRdf; + private ProxyType proxyEuropeana; @BeforeEach void setUp() { - TEST_RDF = new RDF(); + testRdf = new RDF(); EuropeanaProxy europeanaProxy = new 
EuropeanaProxy(); europeanaProxy.setEuropeanaProxy(true); - PROXY_EUROPEANA = new ProxyType(); - PROXY_EUROPEANA.setAbout("/proxy/europeana/260/_kmo_av_sid_45006"); - PROXY_EUROPEANA.setEuropeanaProxy(europeanaProxy); + proxyEuropeana = new ProxyType(); + proxyEuropeana.setAbout("/proxy/europeana/260/_kmo_av_sid_45006"); + proxyEuropeana.setEuropeanaProxy(europeanaProxy); } @Test void testAppendLinkToEuropeanaProxy() { - TEST_RDF.setProxyList(Collections.singletonList(PROXY_EUROPEANA)); + testRdf.setProxyList(Collections.singletonList(proxyEuropeana)); String link = "http://dummylink.com"; Set linkTypes = new HashSet<>(); linkTypes.add(ProxyFieldType.DC_COVERAGE); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, link, linkTypes); + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, link, linkTypes); - assertEquals(1, TEST_RDF.getProxyList().get(0).getChoiceList().size()); + assertEquals(1, testRdf.getProxyList().getFirst().getChoiceList().size()); assertEquals(link, - TEST_RDF.getProxyList().get(0).getChoiceList().get(0).getCoverage().getResource() - .getResource()); + testRdf.getProxyList().getFirst().getChoiceList().getFirst().getCoverage().getResource() + .getResource()); } @Test @@ -73,23 +73,23 @@ void testAppendLinkToEuropeanaProxyAddSameChoiceType() { List choices = new ArrayList<>(); choices.add(choice); - PROXY_EUROPEANA.setChoiceList(choices); + proxyEuropeana.setChoiceList(choices); - TEST_RDF.setProxyList(Collections.singletonList(PROXY_EUROPEANA)); + testRdf.setProxyList(Collections.singletonList(proxyEuropeana)); String link = "http://dummylink.com"; Set linkTypes = new HashSet<>(); linkTypes.add(ProxyFieldType.DC_COVERAGE); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, link, linkTypes); + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, link, linkTypes); - assertEquals(2, TEST_RDF.getProxyList().get(0).getChoiceList().size()); + assertEquals(2, testRdf.getProxyList().getFirst().getChoiceList().size()); 
assertEquals("http://differentdummylink.com", - TEST_RDF.getProxyList().get(0).getChoiceList().get(0).getCoverage().getResource() - .getResource()); + testRdf.getProxyList().getFirst().getChoiceList().get(0).getCoverage().getResource() + .getResource()); assertEquals(link, - TEST_RDF.getProxyList().get(0).getChoiceList().get(1).getCoverage().getResource() - .getResource()); + testRdf.getProxyList().getFirst().getChoiceList().get(1).getCoverage().getResource() + .getResource()); } @@ -107,19 +107,18 @@ void testAppendLinkToEuropeanaProxyAlreadyExists() { List choices = new ArrayList<>(); choices.add(choice); - PROXY_EUROPEANA.setChoiceList(choices); + proxyEuropeana.setChoiceList(choices); - TEST_RDF.setProxyList(Collections.singletonList(PROXY_EUROPEANA)); + testRdf.setProxyList(Collections.singletonList(proxyEuropeana)); Set linkTypes = new HashSet<>(); linkTypes.add(ProxyFieldType.DC_COVERAGE); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, link, linkTypes); + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, link, linkTypes); - assertEquals(1, TEST_RDF.getProxyList().get(0).getChoiceList().size()); + assertEquals(1, testRdf.getProxyList().getFirst().getChoiceList().size()); assertEquals(link, - TEST_RDF.getProxyList().get(0).getChoiceList().get(0).getCoverage().getResource() - .getResource()); + testRdf.getProxyList().getFirst().getChoiceList().getFirst().getCoverage().getResource().getResource()); } @@ -135,80 +134,80 @@ void testGetProviderProxy() { proxyProvider.setEuropeanaProxy(providerProxy); ArrayList proxyList = new ArrayList<>(); - proxyList.add(PROXY_EUROPEANA); + proxyList.add(proxyEuropeana); proxyList.add(proxyProvider); - TEST_RDF.setProxyList(proxyList); + testRdf.setProxyList(proxyList); - List output = RdfEntityUtils.getProviderProxies(TEST_RDF); + List output = RdfEntityUtils.getProviderProxies(testRdf); assertNotNull(output); assertEquals(1, output.size()); - assertNotNull(output.get(0)); - 
assertFalse(output.get(0).getEuropeanaProxy().isEuropeanaProxy()); - assertEquals(proxyProvider, output.get(0)); + assertNotNull(output.getFirst()); + assertFalse(output.getFirst().getEuropeanaProxy().isEuropeanaProxy()); + assertEquals(proxyProvider, output.getFirst()); } @Test void testGetProviderProxyWithoutProvider() { - TEST_RDF.setProxyList(Collections.singletonList(PROXY_EUROPEANA)); - List output = RdfEntityUtils.getProviderProxies(TEST_RDF); + testRdf.setProxyList(Collections.singletonList(proxyEuropeana)); + List output = RdfEntityUtils.getProviderProxies(testRdf); assertNotNull(output); assertTrue(output.isEmpty()); } @Test void testRemoveMatchingEntities() { - TEST_RDF.setProxyList(Collections.singletonList(PROXY_EUROPEANA)); + testRdf.setProxyList(Collections.singletonList(proxyEuropeana)); String agentLink = "http://data.europeana.eu/agent/example1"; String conceptLink = "http://data.europeana.eu/concept/example1"; String placeLink = "http://data.europeana.eu/place/example1"; String timespanLink = "http://data.europeana.eu/timespan/example1"; - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, agentLink, + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, agentLink, Sets.newHashSet(ProxyFieldType.DC_CREATOR)); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, conceptLink, + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, conceptLink, Sets.newHashSet(ProxyFieldType.DC_SUBJECT)); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, placeLink, + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, placeLink, Sets.newHashSet(ProxyFieldType.DC_COVERAGE)); - RdfEntityUtils.appendLinkToEuropeanaProxy(TEST_RDF, timespanLink, + RdfEntityUtils.appendLinkToEuropeanaProxy(testRdf, timespanLink, Sets.newHashSet(ProxyFieldType.DCTERMS_CREATED)); final AgentType agentType = new AgentType(); agentType.setAbout(agentLink); - TEST_RDF.setAgentList(new ArrayList<>(Collections.singleton(agentType))); + testRdf.setAgentList(new 
ArrayList<>(Collections.singleton(agentType))); final Concept concept = new Concept(); concept.setAbout(conceptLink); - TEST_RDF.setConceptList(new ArrayList<>(Collections.singleton(concept))); + testRdf.setConceptList(new ArrayList<>(Collections.singleton(concept))); final PlaceType placeType = new PlaceType(); placeType.setAbout(placeLink); - TEST_RDF.setPlaceList(new ArrayList<>(Collections.singleton(placeType))); + testRdf.setPlaceList(new ArrayList<>(Collections.singleton(placeType))); final TimeSpanType timeSpanType = new TimeSpanType(); timeSpanType.setAbout(timespanLink); - TEST_RDF.setTimeSpanList(new ArrayList<>(Collections.singleton(timeSpanType))); + testRdf.setTimeSpanList(new ArrayList<>(Collections.singleton(timeSpanType))); - assertEquals(4, TEST_RDF.getProxyList().get(0).getChoiceList().size()); + assertEquals(4, testRdf.getProxyList().getFirst().getChoiceList().size()); assertEquals(agentLink, - TEST_RDF.getProxyList().get(0).getChoiceList().get(0).getCreator().getResource() - .getResource()); - assertEquals(1, TEST_RDF.getAgentList().size()); + testRdf.getProxyList().getFirst().getChoiceList().getFirst().getCreator().getResource() + .getResource()); + assertEquals(1, testRdf.getAgentList().size()); assertEquals(conceptLink, - TEST_RDF.getProxyList().get(0).getChoiceList().get(1).getSubject().getResource() - .getResource()); - assertEquals(1, TEST_RDF.getConceptList().size()); + testRdf.getProxyList().getFirst().getChoiceList().get(1).getSubject().getResource() + .getResource()); + assertEquals(1, testRdf.getConceptList().size()); assertEquals(placeLink, - TEST_RDF.getProxyList().get(0).getChoiceList().get(2).getCoverage().getResource() - .getResource()); - assertEquals(1, TEST_RDF.getPlaceList().size()); + testRdf.getProxyList().getFirst().getChoiceList().get(2).getCoverage().getResource() + .getResource()); + assertEquals(1, testRdf.getPlaceList().size()); assertEquals(timespanLink, - 
TEST_RDF.getProxyList().get(0).getChoiceList().get(3).getCreated().getResource() - .getResource()); - assertEquals(1, TEST_RDF.getTimeSpanList().size()); + testRdf.getProxyList().getFirst().getChoiceList().get(3).getCreated().getResource() + .getResource()); + assertEquals(1, testRdf.getTimeSpanList().size()); //Find the correct links final Set links = new HashSet<>(); @@ -216,12 +215,12 @@ void testRemoveMatchingEntities() { links.add(conceptLink); links.add(placeLink); links.add(timespanLink); - RdfEntityUtils.removeMatchingEntities(TEST_RDF, links); + RdfEntityUtils.removeMatchingEntities(testRdf, links); - assertEquals(0, TEST_RDF.getProxyList().get(0).getChoiceList().size()); - assertEquals(0, TEST_RDF.getAgentList().size()); - assertEquals(0, TEST_RDF.getConceptList().size()); - assertEquals(0, TEST_RDF.getPlaceList().size()); - assertEquals(0, TEST_RDF.getTimeSpanList().size()); + assertEquals(0, testRdf.getProxyList().getFirst().getChoiceList().size()); + assertEquals(0, testRdf.getAgentList().size()); + assertEquals(0, testRdf.getConceptList().size()); + assertEquals(0, testRdf.getPlaceList().size()); + assertEquals(0, testRdf.getTimeSpanList().size()); } } diff --git a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/YearParserTest.java b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/YearParserTest.java index ed0e886119..afba0823c8 100644 --- a/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/YearParserTest.java +++ b/metis-enrichment/metis-enrichment-client/src/test/java/eu/europeana/enrichment/utils/YearParserTest.java @@ -10,10 +10,10 @@ import java.util.regex.Pattern; import org.junit.jupiter.api.Test; -public class YearParserTest { +class YearParserTest { @Test - public void parseTest() { + void parseTest() { // Mock parser final YearParser yearParser = spy(new YearParser()); diff --git a/metis-enrichment/metis-enrichment-common/pom.xml 
b/metis-enrichment/metis-enrichment-common/pom.xml index a037bec13f..c825aaaea0 100644 --- a/metis-enrichment/metis-enrichment-common/pom.xml +++ b/metis-enrichment/metis-enrichment-common/pom.xml @@ -4,7 +4,7 @@ metis-enrichment eu.europeana.metis - 12.2 + 13 metis-enrichment-common diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/DereferenceResultStatus.java b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/DereferenceResultStatus.java index 96066db0f3..318bf28eea 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/DereferenceResultStatus.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/DereferenceResultStatus.java @@ -1,15 +1,54 @@ package eu.europeana.enrichment.api.external; +import java.net.URI; + /** * Dereference result status */ public enum DereferenceResultStatus { + + /** + * This means that dereferencing was done successfully. + */ SUCCESS, + + /** + * This means that the resource ID to dereference is not valid (see {@link URI#URI(String)}). + */ INVALID_URL, + + /** + * This means that no vocabulary is known that matches to the resource ID. + */ NO_VOCABULARY_MATCHING, + + /** + * This means that one or more vocabularies could be found matching the resource ID, but no + * entity is known by that ID at source. + */ NO_ENTITY_FOR_VOCABULARY, + + /** + * This means that the source entity was obtained, but an error occurred either when transforming + * it to a contextual class, or when parsing the result of that transformation. + */ ENTITY_FOUND_XML_XSLT_ERROR, + + /** + * This means that the source entity was obtained, but the transformation yielded no resulting + * contextual class (probably by design: the entity was determined not to qualify by the + * transformation). 
+ */ ENTITY_FOUND_XML_XSLT_PRODUCE_NO_CONTEXTUAL_CLASS, + + /** + * This means that an entity ID corresponding to the Europeana entity collection was provided, + * but no entity is known by that ID. + */ UNKNOWN_EUROPEANA_ENTITY, + + /** + * This means an unspecified failure: this should be reported as a bug. + */ FAILURE } diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolver.java b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolver.java index 266fdcafae..6f950681ef 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolver.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolver.java @@ -103,7 +103,7 @@ private Map> performBatch(boolean uriSearch, List // TODO: 02/06/2022 This is actually bypassing the batching.. This is the selected way to perform this for now. for (I batchItem : batch) { List enrichmentBaseList = performItem(batchItem, uriSearch); - result.put(batchItem, !enrichmentBaseList.isEmpty() ? enrichmentBaseList.stream().filter(Objects::nonNull).collect(Collectors.toList()) : + result.put(batchItem, !enrichmentBaseList.isEmpty() ? 
enrichmentBaseList.stream().filter(Objects::nonNull).toList() : Collections.emptyList()); } return result; diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/model/TimeSpan.java b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/model/TimeSpan.java index 3ee452ada0..f9cfd30aff 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/model/TimeSpan.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/external/model/TimeSpan.java @@ -110,7 +110,7 @@ private void init(eu.europeana.entitymanagement.definitions.model.TimeSpan timeS } this.hiddenLabel = convertListToLabel(timeSpan.getHiddenLabel()); if (timeSpan.getIsNextInSequence() != null) { - this.isNextInSequence = new Part(timeSpan.getIsNextInSequence().get(0)); + this.isNextInSequence = new Part(timeSpan.getIsNextInSequence().getFirst()); } } } diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractReferenceTerm.java b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractReferenceTerm.java index 1e6a1e6675..24e45ab8a8 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractReferenceTerm.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractReferenceTerm.java @@ -11,7 +11,7 @@ public abstract class AbstractReferenceTerm implements ReferenceTerm { private final URL reference; - public AbstractReferenceTerm(URL reference) { + protected AbstractReferenceTerm(URL reference) { this.reference = reference; } diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractSearchTerm.java 
b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractSearchTerm.java index 10e2a95106..1453b17ef4 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractSearchTerm.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/AbstractSearchTerm.java @@ -9,7 +9,7 @@ public abstract class AbstractSearchTerm implements SearchTerm { private final String textValue; private final String language; - public AbstractSearchTerm(String textValue, String language) { + protected AbstractSearchTerm(String textValue, String language) { this.textValue = textValue; this.language = language; } diff --git a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/ProxyFieldType.java b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/ProxyFieldType.java index 592f760ab2..8ed0815a0e 100644 --- a/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/ProxyFieldType.java +++ b/metis-enrichment/metis-enrichment-common/src/main/java/eu/europeana/enrichment/api/internal/ProxyFieldType.java @@ -143,8 +143,8 @@ public EntityType getEntityType() { private static final class ChoiceContentHandler { - protected final Predicate choiceChecker; - protected final Function contentGetter; + private final Predicate choiceChecker; + private final Function contentGetter; private final BiConsumer contentSetter; private final Supplier contentCreator; diff --git a/metis-enrichment/metis-enrichment-common/src/test/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolverTest.java b/metis-enrichment/metis-enrichment-common/src/test/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolverTest.java index 07c54a81a3..3c73b51f5f 100644 --- 
a/metis-enrichment/metis-enrichment-common/src/test/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolverTest.java +++ b/metis-enrichment/metis-enrichment-common/src/test/java/eu/europeana/enrichment/api/external/impl/ClientEntityResolverTest.java @@ -270,7 +270,7 @@ void resolveById(Map referenc .toList(); if (entry.getValue().isChildEuropeanaEntity) { - when(entityClientApi.getEntityById(entry.getKey().getReference().toString())).thenReturn(children.get(0)); + when(entityClientApi.getEntityById(entry.getKey().getReference().toString())).thenReturn(children.getFirst()); } else { when(entityClientApi.getEntityByUri(entry.getKey().getReference().toString())).thenReturn(children); } @@ -358,7 +358,7 @@ void resolveByUri(Map referen .toList(); if (entry.getValue().isChildEuropeanaEntity) { - when(entityClientApi.getEntityById(entry.getKey().getReference().toString())).thenReturn(children.get(0)); + when(entityClientApi.getEntityById(entry.getKey().getReference().toString())).thenReturn(children.getFirst()); } else { when(entityClientApi.getEntityByUri(entry.getKey().getReference().toString())).thenReturn(children); } diff --git a/metis-enrichment/pom.xml b/metis-enrichment/pom.xml index 8daf70cfdf..88b1b7cbc0 100644 --- a/metis-enrichment/pom.xml +++ b/metis-enrichment/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-enrichment pom diff --git a/metis-harvesting/pom.xml b/metis-harvesting/pom.xml index a04ca4e517..ba84ddb79a 100644 --- a/metis-harvesting/pom.xml +++ b/metis-harvesting/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-harvesting diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecord.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecord.java new file mode 100644 index 0000000000..8010a0ea74 --- /dev/null +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecord.java @@ -0,0 +1,52 @@ +package 
eu.europeana.metis.harvesting; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.time.Instant; + +/** + * This interface represents a harvested record. + */ +public interface FullRecord { + + /** + * Makes the record's content available. + * + * @param outputStream The output stream to which to write the content. The caller needs to close + * it after use. + * @throws IOException In case there was a problem obtaining the content. + * @throws IllegalStateException In case the record is deleted at source (see + * {@link #isDeleted()}). + */ + void writeContent(OutputStream outputStream) throws IOException; + + /** + * Makes the record's content available. + * + * @return An input stream containing the record. The caller needs to close it after use. + * @throws IOException In case there was a problem obtaining the content. + * @throws IllegalStateException In case the record is deleted at source (see + * {@link #isDeleted()}). + */ + InputStream getContent() throws IOException; + + /** + * @return Whether this record is deleted at source. If the specific harvest type does not support + * identifying which records are deleted at source, this method will return false. + */ + boolean isDeleted(); + + /** + * @return The harvesting identifier of the entry. This should be unique. This may be different + * from the record ID (rdf:about) and may exist only in the context of this harvest. + */ + String getHarvestingIdentifier(); + + /** + * Returns the timestamp associated with the record. + * + * @return The timestamp. Or null if no timestamp is known. 
+ */ + Instant getTimeStamp(); +} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecordHarvestingIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecordHarvestingIterator.java new file mode 100644 index 0000000000..3eff5b64fd --- /dev/null +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/FullRecordHarvestingIterator.java @@ -0,0 +1,13 @@ +package eu.europeana.metis.harvesting; + +/** + * Implementations of this interface allow iterative access to records as they are being harvested. + * The iterator needs to be closed after use. + * + * @param The type of the record to harvest. + * @param The type of the object on which filtering is to be applied. + */ +public interface FullRecordHarvestingIterator extends + HarvestingIterator { + +} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/HarvestingIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/HarvestingIterator.java new file mode 100644 index 0000000000..05e8a391ab --- /dev/null +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/HarvestingIterator.java @@ -0,0 +1,53 @@ +package eu.europeana.metis.harvesting; + +import java.io.Closeable; +import java.util.function.Predicate; + +/** + * Implementations of this interface allow iterative access to records as they are being harvested. + * The iterator needs to be closed after use. + * + * @param The type of the record to harvest. + * @param The type of the object on which filtering is to be applied. + */ +public interface HarvestingIterator extends Closeable { + + /** + * Iterate through the records while applying a filter (potentially skipping some records). + * + * @param action The iteration to perform. It needs to return a result. + * @param filter The filter to apply (only records that return true will be sent to the action). + * @throws HarvesterException In case there was a problem while harvesting. 
+ */ + void forEachFiltered(ReportingIteration action, Predicate filter) throws HarvesterException; + + /** + * Iterate through all the records. + * + * @param action The iteration to perform. It needs to return a result. + * @throws HarvesterException In case there was a problem while harvesting. + */ + default void forEach(ReportingIteration action) throws HarvesterException { + forEachFiltered(action, header -> true); + } + + /** + * Iterate through all non-deleted records. If the specific harvest type does not support + * identifying which records are deleted at source, this method will behave just like + * {@link #forEach(ReportingIteration)}. I.e. all records are assumed to not be deleted. + * + * @param action The iteration to perform. It needs to return a result. + * @throws HarvesterException In case there was a problem while harvesting. + */ + void forEachNonDeleted(ReportingIteration action) throws HarvesterException; + + /** + * Attempts to count the number of records. This method may make assumptions, and any result is + * only indicative. Server requests or other IO operations may be performed in order to perform + * this count, so this method is to be used sparingly. + * + * @return The number of records. Or null if the number could not be determined. + * @throws HarvesterException In case something went wrong. 
+ */ + Integer countRecords() throws HarvesterException; +} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/ReportingIteration.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/ReportingIteration.java index 517f66ac37..877ee99dbc 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/ReportingIteration.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/ReportingIteration.java @@ -1,5 +1,7 @@ package eu.europeana.metis.harvesting; +import java.io.IOException; + /** * Implementations of this interface represent an iteration of a data iterator that also reports on * whether to continue. @@ -19,6 +21,9 @@ enum IterationResult {TERMINATE, CONTINUE} * * @param data The data to process. * @return Whether to continue processing. + * @throws IOException in case there was a harvesting related issue. This will cause the remaining + * records not to be processed (as if {@link IterationResult#TERMINATE} was + * passed). 
*/ - IterationResult process(T data); + IterationResult process(T data) throws IOException; } diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/AbstractHttpHarvestIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/AbstractHttpHarvestIterator.java new file mode 100644 index 0000000000..353de5435a --- /dev/null +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/AbstractHttpHarvestIterator.java @@ -0,0 +1,175 @@ +package eu.europeana.metis.harvesting.http; + +import eu.europeana.metis.harvesting.FullRecord; +import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.HarvestingIterator; +import eu.europeana.metis.harvesting.ReportingIteration; +import eu.europeana.metis.harvesting.ReportingIteration.IterationResult; +import eu.europeana.metis.utils.CompressedFileExtension; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.invoke.MethodHandles; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.time.Instant; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Predicate; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Iterator for harvesting + */ +abstract class AbstractHttpHarvestIterator implements HarvestingIterator { + + private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final Path extractedDirectory; + + protected AbstractHttpHarvestIterator(Path extractedDirectory) { + Objects.requireNonNull(extractedDirectory,"Extracted directory is null. 
This should not happen."); + this.extractedDirectory = extractedDirectory; + } + + protected String getExtractedDirectory() { + return extractedDirectory.toString(); + } + + @Override + public void close() { + try { + FileUtils.deleteDirectory(extractedDirectory.toFile()); + } catch (IOException e) { + LOGGER.warn("Could not delete directory.", e); + } + } + + /** + * Iterate through the record paths while applying a filter (potentially skipping some records). + * + * @param action The iteration to perform. It needs to return a result. + * @param filter The filter to apply (only records that return true will be sent to the action). + * @throws HarvesterException In case there was a problem while harvesting. + */ + public void forEachPathFiltered(ReportingIteration action, Predicate filter) + throws HarvesterException { + try { + Files.walkFileTree(extractedDirectory, new FileIteration(action, filter)); + } catch (IOException e) { + throw new HarvesterException("Exception while iterating through the extracted files.", e); + } + } + + /** + * Iterate through the {@link FullRecord} while applying a filter (potentially skipping some records). + * + * @param action The iteration to perform. It needs to return a result. + * @param filter The filter to apply (only records that return true will be sent to the action). + * @throws HarvesterException In case there was a problem while harvesting. 
+ */ + public void forEachFileFiltered(ReportingIteration action, Predicate filter) + throws HarvesterException { + forEachPathFiltered(path -> { + try (InputStream content = Files.newInputStream(path)) { + return action.process(new FullRecordImpl(extractedDirectory.relativize(path).toString(), + new ByteArrayInputStream(IOUtils.toByteArray(content)))); + } catch (RuntimeException e) { + throw new IOException("Could not process path " + path + ".", e); + } + }, filter); + } + + @Override + public void forEachNonDeleted(ReportingIteration action) throws HarvesterException { + forEach(action); + } + + @Override + public Integer countRecords() throws HarvesterException { + // Go by each path only: no need to inspect the full file. + final AtomicInteger counter = new AtomicInteger(0); + forEachPathFiltered(path -> { + counter.incrementAndGet(); + return IterationResult.CONTINUE; + }, path -> true); + return counter.get(); + } + + private static class FileIteration extends SimpleFileVisitor { + + private static final String MAC_TEMP_FILE = ".DS_Store"; + private static final String MAC_TEMP_FOLDER = "__MACOSX"; + + private final ReportingIteration action; + private final Predicate filter; + + public FileIteration(ReportingIteration action, Predicate filter) { + this.action = action; + this.filter = filter; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (!filter.test(file)) { + return FileVisitResult.CONTINUE; + } + final Path fileName = file.getFileName(); + if (fileName != null && MAC_TEMP_FILE.equals(fileName.toString())) { + return FileVisitResult.CONTINUE; + } + if (CompressedFileExtension.forPath(file) != null) { + return FileVisitResult.CONTINUE; + } + final IterationResult result = action.process(file); + if (result == null) { + throw new IllegalArgumentException("Iteration result cannot be null."); + } + return result == IterationResult.TERMINATE ? 
FileVisitResult.TERMINATE + : FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { + final Path dirName = dir.getFileName(); + if (dirName != null && MAC_TEMP_FOLDER.equals(dirName.toString())) { + return FileVisitResult.SKIP_SUBTREE; + } + return FileVisitResult.CONTINUE; + } + } + + private record FullRecordImpl(String relativeFilePath, ByteArrayInputStream entryContent) implements FullRecord { + + @Override + public String getHarvestingIdentifier() { + return relativeFilePath; + } + + @Override + public void writeContent(OutputStream outputStream) throws IOException { + IOUtils.copy(entryContent, outputStream); + } + + @Override + public ByteArrayInputStream getContent() { + return entryContent; + } + + @Override + public boolean isDeleted() { + return false; + } + + @Override + public Instant getTimeStamp() { + return null; + } + } +} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvester.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvester.java index 4b75a0d8da..cc56f3bebf 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvester.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvester.java @@ -1,10 +1,13 @@ package eu.europeana.metis.harvesting.http; +import eu.europeana.metis.harvesting.FullRecord; +import eu.europeana.metis.harvesting.FullRecordHarvestingIterator; import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.HarvestingIterator; +import eu.europeana.metis.harvesting.ReportingIteration; import eu.europeana.metis.utils.CompressedFileExtension; -import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.util.function.Consumer; +import java.nio.file.Path; /** * Implementations of this interface provide the functionality to harvest from HTTP (compressed archive). 
@@ -22,7 +25,7 @@ public interface HttpHarvester { * @return An iterator that provides access to the decompressed records. * @throws HarvesterException In case there was an issue during the harvest. */ - HttpRecordIterator harvestRecords(String archiveUrl, String downloadDirectory) + HarvestingIterator harvestRecords(String archiveUrl, String downloadDirectory) throws HarvesterException; /** @@ -35,34 +38,19 @@ HttpRecordIterator harvestRecords(String archiveUrl, String downloadDirectory) * @param action The action to be performed. * @throws HarvesterException In case there was an issue during the harvest. */ - void harvestRecords(InputStream inputStream, CompressedFileExtension compressedFileType, - Consumer action) throws HarvesterException; + void harvestFullRecords(InputStream inputStream, CompressedFileExtension compressedFileType, + ReportingIteration action) throws HarvesterException; /** - * It creates a {@link HttpRecordIterator} with a InputStream into a temporary file directory. When finished using the created - * iterator, the method {@link HttpRecordIterator#deleteIteratorContent()} should be used to clean up leftover files. + * It creates a {@link HarvestingIterator} with a InputStream into a temporary file directory. When finished using the created + * iterator, the iterator should be closed to clean up leftover files. * * @param input The input stream from which we create the iterator * @param compressedFileType The type of compressed file type * @return A HttpRecordIterator based on a temporary file location * @throws HarvesterException In case there is an issue while using the input stream */ - HttpRecordIterator createTemporaryHttpHarvestIterator(InputStream input, CompressedFileExtension compressedFileType) - throws HarvesterException; - - /** - * An object representing an entry in a file archive. - */ - interface ArchiveEntry { - - /** - * @return The name of the entry. This is the file name (including extension, excluding the path). 
- */ - String getEntryName(); + FullRecordHarvestingIterator createFullRecordHarvestIterator(InputStream input, + CompressedFileExtension compressedFileType) throws HarvesterException; - /** - * @return The content of the entry (in memory). - */ - ByteArrayInputStream getEntryContent(); - } } diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvesterImpl.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvesterImpl.java index 9db86940d6..e39d201682 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvesterImpl.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpHarvesterImpl.java @@ -4,12 +4,13 @@ import static eu.europeana.metis.utils.TempFileUtils.createSecureTempDirectoryAndFile; import static org.apache.commons.io.FileUtils.copyInputStreamToFile; +import eu.europeana.metis.harvesting.FullRecord; +import eu.europeana.metis.harvesting.FullRecordHarvestingIterator; import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.HarvestingIterator; import eu.europeana.metis.harvesting.ReportingIteration; -import eu.europeana.metis.harvesting.ReportingIteration.IterationResult; import eu.europeana.metis.utils.CompressedFileExtension; import eu.europeana.metis.utils.CompressedFileHandler; -import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -18,24 +19,16 @@ import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; -import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFilePermission; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; import java.util.Set; -import 
java.util.function.Consumer; +import java.util.function.Predicate; import java.util.stream.Stream; -import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,33 +42,18 @@ public class HttpHarvesterImpl implements HttpHarvester { private static final Logger LOGGER = LoggerFactory.getLogger(HttpHarvesterImpl.class); @Override - public void harvestRecords(InputStream inputStream, CompressedFileExtension compressedFileType, - Consumer action) throws HarvesterException { - - // Now perform the harvesting - go by each file. - final HttpRecordIterator iterator = createTemporaryHttpHarvestIterator(inputStream, compressedFileType); - List> exception = new ArrayList<>(1); - iterator.forEach(path -> { - try (InputStream content = Files.newInputStream(path)) { - action.accept(new ArchiveEntryImpl(path.getFileName().toString(), - new ByteArrayInputStream(IOUtils.toByteArray(content)))); - return IterationResult.CONTINUE; - } catch (IOException | RuntimeException e) { - exception.add(new ImmutablePair<>(path, e)); - return IterationResult.TERMINATE; - } - }); - - iterator.deleteIteratorContent(); - - if (!exception.isEmpty()) { - throw new HarvesterException("Could not process path " + exception.get(0).getKey() + ".", - exception.get(0).getValue()); + public void harvestFullRecords(InputStream inputStream, + CompressedFileExtension compressedFileType, ReportingIteration action) + throws HarvesterException { + try (final HarvestingIterator iterator = createFullRecordHarvestIterator(inputStream, compressedFileType)) { + iterator.forEach(action); + } catch (IOException e) { + throw new HarvesterException("Could not clean up.", e); } } @Override - public HttpRecordIterator harvestRecords(String archiveUrl, String downloadDirectory) + public HarvestingIterator 
harvestRecords(String archiveUrl, String downloadDirectory) throws HarvesterException { // Download the archive. Note that we allow any directory here (even on other file systems), @@ -90,24 +68,28 @@ public HttpRecordIterator harvestRecords(String archiveUrl, String downloadDirec } // Perform the harvesting - return harvestRecords(downloadedFile); + return new PathIterator(extractArchive(downloadedFile)); } @Override - public HttpRecordIterator createTemporaryHttpHarvestIterator(InputStream input, CompressedFileExtension compressedFileType) - throws HarvesterException { + public FullRecordHarvestingIterator createFullRecordHarvestIterator(InputStream input, + CompressedFileExtension compressedFileType) throws HarvesterException { + return new RecordIterator(extractArchiveSecurely(input, compressedFileType)); + } + + private Path extractArchiveSecurely(InputStream input, + CompressedFileExtension compressedFileType) throws HarvesterException { try { final Path tempFile = createSecureTempDirectoryAndFile(HttpHarvesterImpl.class.getSimpleName(), HttpHarvesterImpl.class.getSimpleName(), compressedFileType.getExtension()); copyInputStreamToFile(input, tempFile.toFile()); - return harvestRecords(tempFile); + return extractArchive(tempFile); } catch (IOException e) { throw new HarvesterException("Problem saving archive.", e); } - } - private HttpRecordIterator harvestRecords(Path archiveFile) throws HarvesterException { + private Path extractArchive(Path archiveFile) throws HarvesterException { // Extract the archive. 
final Path extractedDirectory = archiveFile.toAbsolutePath().getParent(); @@ -127,8 +109,8 @@ private HttpRecordIterator harvestRecords(Path archiveFile) throws HarvesterExce throw new HarvesterException("Problem correcting directory rights.", e); } - // Return the iterator - return new FileIterator(extractedDirectory); + // Return the extracted directory + return extractedDirectory; } private Path downloadFile(String archiveUrlString, Path downloadDirectory) throws IOException, URISyntaxException { @@ -181,101 +163,35 @@ private static void correctDirectoryRights(Path directory) throws IOException { } } - /** - * Iterator for harvesting - */ - private static class FileIterator implements HttpRecordIterator { - - private final Path extractedDirectory; - - public FileIterator(Path extractedDirectory) { - this.extractedDirectory = extractedDirectory; - } - - @Override - public String getExtractedDirectory() { - return extractedDirectory != null ? extractedDirectory.toString() : ""; - } - - @Override - public void deleteIteratorContent() { - if (extractedDirectory != null) { - try { - FileUtils.deleteDirectory(extractedDirectory.toFile()); - } catch (IOException e) { - LOGGER.warn("Could not delete directory.", e); - } - } else { - LOGGER.warn("Extracted directory undefined, nothing removed."); - } - } - - @Override - public void forEach(ReportingIteration action) throws HarvesterException { - try { - Files.walkFileTree(extractedDirectory, new FileIteration(action)); - } catch (IOException e) { - throw new HarvesterException("Exception while iterating through the extracted files.", e); - } - } - } - - private static class FileIteration extends SimpleFileVisitor { - - private static final String MAC_TEMP_FILE = ".DS_Store"; - private static final String MAC_TEMP_FOLDER = "__MACOSX"; + private static class RecordIterator extends AbstractHttpHarvestIterator + implements FullRecordHarvestingIterator { - private final ReportingIteration action; - - public 
FileIteration(ReportingIteration action) { - this.action = action; - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { - final Path fileName = file.getFileName(); - if (fileName != null && MAC_TEMP_FILE.equals(fileName.toString())) { - return FileVisitResult.CONTINUE; - } - if (CompressedFileExtension.forPath(file) != null) { - return FileVisitResult.CONTINUE; - } - final IterationResult result = action.process(file); - if (result == null) { - throw new IllegalArgumentException("Iteration result cannot be null."); - } - return IterationResult.TERMINATE == result ? FileVisitResult.TERMINATE - : FileVisitResult.CONTINUE; + public RecordIterator(Path extractedDirectory) { + super(extractedDirectory); } @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { - final Path dirName = dir.getFileName(); - if (dirName != null && MAC_TEMP_FOLDER.equals(dirName.toString())) { - return FileVisitResult.SKIP_SUBTREE; - } - return FileVisitResult.CONTINUE; + public void forEachFiltered(ReportingIteration action, Predicate filter) + throws HarvesterException { + forEachFileFiltered(action, filter); } } - private static class ArchiveEntryImpl implements ArchiveEntry { + private static class PathIterator extends AbstractHttpHarvestIterator { - final String entryName; - final ByteArrayInputStream entryContent; - - public ArchiveEntryImpl(String entryName, ByteArrayInputStream entryContent) { - this.entryName = entryName; - this.entryContent = entryContent; + public PathIterator(Path extractedDirectory) { + super(extractedDirectory); } @Override - public String getEntryName() { - return entryName; + public void forEachFiltered(ReportingIteration action, Predicate filter) + throws HarvesterException { + forEachPathFiltered(action, filter); } @Override - public ByteArrayInputStream getEntryContent() { - return entryContent; + public String getExtractedDirectory() { + return super.getExtractedDirectory(); } } } 
diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpRecordIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpRecordIterator.java deleted file mode 100644 index 35492c07fb..0000000000 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/http/HttpRecordIterator.java +++ /dev/null @@ -1,46 +0,0 @@ -package eu.europeana.metis.harvesting.http; - -import eu.europeana.metis.harvesting.HarvesterException; -import eu.europeana.metis.harvesting.ReportingIteration; -import eu.europeana.metis.harvesting.ReportingIteration.IterationResult; -import java.nio.file.Path; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * Implementations of this interface provide iterative access to the decompressed results of a HTTP - * (compressed archive) harvest. Note: the class does not clean up the downloaded or decompressed - * files. - */ -public interface HttpRecordIterator { - - /** - * Returns the extracted directory used to create the iterator if there is any - * @return The extracted directory as a string if there is any, empty string if there is none - */ - String getExtractedDirectory(); - - void deleteIteratorContent(); - - /** - * Iterate through the decompressed records. - * - * @param action The iteration action to be executed for each harvested record. - * @throws HarvesterException In case something went wrong during the iteration. - */ - void forEach(ReportingIteration action) throws HarvesterException; - - /** - * Count the number of decompressed records. - * - * @return The number of decompressed records. - * @throws HarvesterException In case something went wrong during the counting. 
- */ - default int getExpectedRecordCount() throws HarvesterException { - final AtomicInteger counter = new AtomicInteger(0); - forEach(path -> { - counter.incrementAndGet(); - return IterationResult.CONTINUE; - }); - return counter.get(); - } -} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvester.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvester.java index 7cd30b1cfe..5e25efcba2 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvester.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvester.java @@ -1,6 +1,7 @@ package eu.europeana.metis.harvesting.oaipmh; import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.HarvestingIterator; /** * Implementations of this interface provide OAI-PMH harvesting access. @@ -12,9 +13,16 @@ public interface OaiHarvester { * * @param harvest The harvest request to execute. * @return An iterator providing access to the headers. The caller needs to close it after use. - * @throws HarvesterException In case something went wrong. */ - OaiRecordHeaderIterator harvestRecordHeaders(OaiHarvest harvest) throws HarvesterException; + HarvestingIterator harvestRecordHeaders(OaiHarvest harvest); + + /** + * Harvest the full records. + * + * @param harvest The harvest request to execute. + * @return An iterator providing access to the headers. The caller needs to close it after use. + */ + HarvestingIterator harvestRecords(OaiHarvest harvest); /** * Harvest an individual record. 
diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvesterImpl.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvesterImpl.java index d9e373e3db..5c854e2bc1 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvesterImpl.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvesterImpl.java @@ -3,48 +3,21 @@ import static eu.europeana.metis.utils.SonarqubeNullcheckAvoidanceUtils.performThrowingFunction; import eu.europeana.metis.harvesting.HarvesterException; -import eu.europeana.metis.harvesting.ReportingIteration; -import eu.europeana.metis.harvesting.ReportingIteration.IterationResult; -import io.gdcc.xoai.model.oaipmh.results.record.Header; +import eu.europeana.metis.harvesting.HarvestingIterator; +import eu.europeana.metis.harvesting.oaipmh.OaiHarvestingIterator.RecordOaiHeaderPostProcessing; import io.gdcc.xoai.model.oaipmh.verbs.Verb; -import io.gdcc.xoai.serviceprovider.ServiceProvider; -import io.gdcc.xoai.serviceprovider.exceptions.BadArgumentException; import io.gdcc.xoai.serviceprovider.exceptions.OAIRequestException; -import io.gdcc.xoai.serviceprovider.model.Context; import io.gdcc.xoai.serviceprovider.parameters.GetRecordParameters; -import io.gdcc.xoai.serviceprovider.parameters.ListIdentifiersParameters; import io.gdcc.xoai.serviceprovider.parameters.Parameters; import java.io.IOException; import java.io.InputStream; -import java.util.Iterator; -import java.util.Optional; -import java.util.function.Predicate; -import javax.xml.XMLConstants; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpression; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; -import javax.xml.xpath.XPathFactoryConfigurationException; import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Node; 
-import org.xml.sax.InputSource; /** * This class provides an implementation of the {@link OaiHarvester} functionality. */ public class OaiHarvesterImpl implements OaiHarvester { - private static final Logger LOGGER = LoggerFactory.getLogger(OaiHarvesterImpl.class); - - private static final String COMPLETE_LIST_SIZE_XPATH = - "/*[local-name()='OAI-PMH']" + - "/*[local-name()='ListIdentifiers']" + - "/*[local-name()='resumptionToken']"; - public static final String COMPLETE_LIST_SIZE = "completeListSize"; - private final ConnectionClientFactory connectionClientFactory; /** @@ -57,53 +30,41 @@ public OaiHarvesterImpl(ConnectionClientFactory connectionClientFactory) { } @Override - public OaiRecordHeaderIterator harvestRecordHeaders(OaiHarvest harvest) - throws HarvesterException { - final ListIdentifiersParameters parameters = prepareListIdentifiersParameters(harvest); - final Iterator
iterator; - final CloseableOaiClient client = connectionClientFactory.createConnectionClient( - harvest.getRepositoryUrl()); - try { - iterator = new ServiceProvider(new Context().withOAIClient(client)) - .listIdentifiers(parameters); - } catch (RuntimeException | BadArgumentException e) { - try { - client.close(); - } catch (IOException ioException) { - LOGGER.info("Could not close connection client.", ioException); - } - throw new HarvesterException(e.getMessage(), e); - } - return new HeaderIterator(iterator, client); + public HarvestingIterator harvestRecordHeaders(OaiHarvest oaiHarvest) { + return new OaiHarvestingIterator<>(connectionClientFactory.createConnectionClient( + oaiHarvest.getRepositoryUrl()), oaiHarvest, (header, oaiClient, harvest) -> header); } - private static ListIdentifiersParameters prepareListIdentifiersParameters(OaiHarvest harvest) { - ListIdentifiersParameters parameters = ListIdentifiersParameters.request() - .withMetadataPrefix(harvest.getMetadataPrefix()); - if (harvest.getFrom() != null) { - parameters.withFrom(harvest.getFrom()); - } - if (harvest.getUntil() != null) { - parameters.withUntil(harvest.getUntil()); - } - if (harvest.getSetSpec() != null) { - parameters.withSetSpec(harvest.getSetSpec()); - } - return parameters; + @Override + public HarvestingIterator harvestRecords(OaiHarvest oaiHarvest) { + final RecordOaiHeaderPostProcessing postProcessing = (header, oaiClient, harvest) -> harvestRecord(oaiClient, harvest, + header.getOaiIdentifier()); + return new OaiHarvestingIterator<>(connectionClientFactory.createConnectionClient( + oaiHarvest.getRepositoryUrl()), oaiHarvest, postProcessing); } @Override public OaiRecord harvestRecord(OaiRepository repository, String oaiIdentifier) throws HarvesterException { + try (final CloseableOaiClient oaiClient = connectionClientFactory + .createConnectionClient(repository.getRepositoryUrl())) { + return harvestRecord(oaiClient, repository, oaiIdentifier); + } catch (IOException e) { + 
throw new HarvesterException(String.format( + "Problem with harvesting record %1$s for endpoint %2$s because of: %3$s", + oaiIdentifier, repository.getRepositoryUrl(), e.getMessage()), e); + } + } + + private static OaiRecord harvestRecord(CloseableOaiClient oaiClient, OaiRepository repository, + String oaiIdentifier) throws HarvesterException { final GetRecordParameters getRecordParameters = GetRecordParameters.request() .withIdentifier(oaiIdentifier) .withMetadataFormatPrefix(repository.getMetadataPrefix()); final Parameters parameters = Parameters.parameters().withVerb(Verb.Type.GetRecord) .include(getRecordParameters); final byte[] byteArrayRecord; - try (final CloseableOaiClient oaiClient = connectionClientFactory - .createConnectionClient(repository.getRepositoryUrl()); - final InputStream recordStream = performThrowingFunction(oaiClient, + try (final InputStream recordStream = performThrowingFunction(oaiClient, client -> client.execute(parameters))) { byteArrayRecord = IOUtils.toByteArray(recordStream); } catch (OAIRequestException | IOException e) { @@ -116,45 +77,13 @@ public OaiRecord harvestRecord(OaiRepository repository, String oaiIdentifier) @Override public Integer countRecords(OaiHarvest harvest) throws HarvesterException { - final Parameters parameters = Parameters.parameters().withVerb(Verb.Type.ListIdentifiers) - .include(prepareListIdentifiersParameters(harvest)); - try (final CloseableOaiClient oaiClient = connectionClientFactory - .createConnectionClient(harvest.getRepositoryUrl()); - final InputStream listIdentifiersResponse = performThrowingFunction(oaiClient, - client -> client.execute(parameters))) { - return readCompleteListSizeFromXML(listIdentifiersResponse); - } catch (OAIRequestException | IOException e) { - throw new HarvesterException(String.format( - "Problem with counting records for endpoint %1$s because of: %2$s", - harvest.getRepositoryUrl(), e.getMessage()), e); + try (HarvestingIterator iterator = 
harvestRecordHeaders(harvest)) { + return iterator.countRecords(); + } catch (IOException e) { + throw new HarvesterException("Problem while closing iterator.", e); } } - private Integer readCompleteListSizeFromXML(InputStream stream) throws HarvesterException { - final XPathExpression expr; - try { - final XPathFactory xpathFactory = XPathFactory.newInstance(); - xpathFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); - final XPath xpath = xpathFactory.newXPath(); - expr = xpath.compile(COMPLETE_LIST_SIZE_XPATH); - } catch (XPathExpressionException | XPathFactoryConfigurationException e) { - throw new HarvesterException("Cannot compile xpath expression.", e); - } - try { - final Node resumptionTokenNode = (Node) expr - .evaluate(new InputSource(stream), XPathConstants.NODE); - if (resumptionTokenNode != null) { - final Node node = resumptionTokenNode.getAttributes().getNamedItem(COMPLETE_LIST_SIZE); - if (node != null) { - return Integer.valueOf(node.getNodeValue()); - } - } - } catch (NumberFormatException | XPathExpressionException e) { - LOGGER.debug("Cannot read completeListSize from OAI response ", e); - } - return null; - } - /** * Implementations of this interface can provide connection clients. */ @@ -167,69 +96,6 @@ public interface ConnectionClientFactory { * @return A connection instance. */ CloseableOaiClient createConnectionClient(String oaiPmhEndpoint); - } - /** - * Iterator for harvesting. It wraps a source iterator and provides additional closing functionality for the connection client. - */ - private static class HeaderIterator implements OaiRecordHeaderIterator { - - private final Iterator
source; - private final CloseableOaiClient oaiClient; - - /** - * Constructor. - * - * @param source The source iterator. - * @param oaiClient The client to close when the iterator is closed. - */ - public HeaderIterator(Iterator
source, CloseableOaiClient oaiClient) { - this.source = source; - this.oaiClient = oaiClient; - } - - @Override - public void forEachFiltered(final ReportingIteration action, - final Predicate filter) throws HarvesterException { - final ReportingIterationWrapper singleIteration = new ReportingIterationWrapper(action, - filter); - try { - while (source.hasNext()) { - final IterationResult result = singleIteration.process(source.next()); - if (IterationResult.TERMINATE == result) { - break; - } - } - } catch (RuntimeException e) { - throw new HarvesterException("Problem while iterating through OAI headers.", e); - } - } - - @Override - public void close() throws IOException { - this.oaiClient.close(); - } - } - - private static class ReportingIterationWrapper implements ReportingIteration
{ - - private final ReportingIteration action; - private final Predicate filter; - - public ReportingIterationWrapper(ReportingIteration action, - Predicate filter) { - this.action = action; - this.filter = filter; - } - - @Override - public IterationResult process(Header input) { - final OaiRecordHeader header = OaiRecordHeader.convert(input); - if (filter.test(header)) { - return Optional.ofNullable(action.process(header)).orElseThrow(() -> - new IllegalArgumentException("Iteration result cannot be null.")); - } - return IterationResult.CONTINUE; - } } } diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvestingIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvestingIterator.java new file mode 100644 index 0000000000..2861d35c88 --- /dev/null +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiHarvestingIterator.java @@ -0,0 +1,218 @@ +package eu.europeana.metis.harvesting.oaipmh; + +import static eu.europeana.metis.utils.SonarqubeNullcheckAvoidanceUtils.performThrowingFunction; + +import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.HarvestingIterator; +import eu.europeana.metis.harvesting.ReportingIteration; +import eu.europeana.metis.harvesting.ReportingIteration.IterationResult; +import io.gdcc.xoai.model.oaipmh.results.record.Header; +import io.gdcc.xoai.model.oaipmh.verbs.Verb; +import io.gdcc.xoai.serviceprovider.ServiceProvider; +import io.gdcc.xoai.serviceprovider.exceptions.BadArgumentException; +import io.gdcc.xoai.serviceprovider.exceptions.OAIRequestException; +import io.gdcc.xoai.serviceprovider.model.Context; +import io.gdcc.xoai.serviceprovider.parameters.ListIdentifiersParameters; +import io.gdcc.xoai.serviceprovider.parameters.Parameters; +import java.io.IOException; +import java.io.InputStream; +import java.lang.invoke.MethodHandles; +import java.util.Iterator; +import java.util.Optional; +import 
java.util.function.Predicate; +import javax.xml.XMLConstants; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpression; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import javax.xml.xpath.XPathFactoryConfigurationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; + +/** + * Iterator for harvesting. It wraps a source iterator and provides additional closing functionality for the connection client. + */ +class OaiHarvestingIterator implements HarvestingIterator { + + private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String COMPLETE_LIST_SIZE_XPATH = + "/*[local-name()='OAI-PMH']" + + "/*[local-name()='ListIdentifiers']" + + "/*[local-name()='resumptionToken']"; + public static final String COMPLETE_LIST_SIZE = "completeListSize"; + private Iterator
source = null; + + private final CloseableOaiClient oaiClient; + private final OaiHarvest harvest; + private final RecordOaiHeaderPostProcessing postProcessing; + + /** + * Constructor. + * + * @param oaiClient The client to close when the iterator is closed. + * @param harvest The harvest request to execute. + * @param recordOaiHeaderPostProcessing post processing after harvested header. + */ + protected OaiHarvestingIterator(CloseableOaiClient oaiClient, OaiHarvest harvest, + RecordOaiHeaderPostProcessing recordOaiHeaderPostProcessing) { + this.oaiClient = oaiClient; + this.harvest = harvest; + this.postProcessing = recordOaiHeaderPostProcessing; + } + + private Iterator
getOrCreateSource() throws HarvesterException { + if (this.source != null) { + return source; + } + final ListIdentifiersParameters parameters = prepareListIdentifiersParameters(harvest); + try { + this.source = new ServiceProvider(new Context().withOAIClient(this.oaiClient)) + .listIdentifiers(parameters); + } catch (RuntimeException | BadArgumentException e) { + try { + this.close(); + } catch (IOException ioException) { + LOGGER.info("Could not close connection client.", ioException); + } + throw new HarvesterException(e.getMessage(), e); + } + return this.source; + } + + private static ListIdentifiersParameters prepareListIdentifiersParameters(OaiHarvest harvest) { + ListIdentifiersParameters parameters = ListIdentifiersParameters.request() + .withMetadataPrefix(harvest.getMetadataPrefix()); + if (harvest.getFrom() != null) { + parameters.withFrom(harvest.getFrom()); + } + if (harvest.getUntil() != null) { + parameters.withUntil(harvest.getUntil()); + } + if (harvest.getSetSpec() != null) { + parameters.withSetSpec(harvest.getSetSpec()); + } + return parameters; + } + + @Override + public void forEachFiltered(ReportingIteration action, + Predicate filter) throws HarvesterException { + forEachWithPostProcessing(action, postProcessing, filter); + } + + private void forEachWithPostProcessing(final ReportingIteration action, + final RecordOaiHeaderPostProcessing postProcessing, final Predicate filter) + throws HarvesterException { + final SingleIteration singleIteration = new SingleIteration<>(filter, postProcessing, action, oaiClient, harvest); + try { + while (getOrCreateSource().hasNext()) { + final Header header = Optional.of(getOrCreateSource().next()) + .orElseThrow(() -> new HarvesterException("Unexpected null header.")); + if (singleIteration.process(header) == IterationResult.TERMINATE) { + break; + } + } + } catch (RuntimeException e) { + throw new HarvesterException("Problem while iterating through OAI headers.", e); + } + } + + @Override + public void 
forEachNonDeleted(ReportingIteration action) throws HarvesterException { + forEachFiltered(action, Predicate.not(OaiRecordHeader::isDeleted)); + } + + @Override + public Integer countRecords() throws HarvesterException { + final Parameters parameters = Parameters.parameters().withVerb(Verb.Type.ListIdentifiers) + .include(prepareListIdentifiersParameters(harvest)); + try (final InputStream listIdentifiersResponse = performThrowingFunction(oaiClient, + client -> client.execute(parameters))) { + return readCompleteListSizeFromXML(listIdentifiersResponse); + } catch (OAIRequestException | IOException e) { + throw new HarvesterException(String.format( + "Problem with counting records for endpoint %1$s because of: %2$s", + harvest.getRepositoryUrl(), e.getMessage()), e); + } + } + + @Override + public void close() throws IOException { + this.oaiClient.close(); + } + + private static Integer readCompleteListSizeFromXML(InputStream stream) throws HarvesterException { + final XPathExpression expr; + try { + final XPathFactory xpathFactory = XPathFactory.newInstance(); + xpathFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); + final XPath xpath = xpathFactory.newXPath(); + expr = xpath.compile(COMPLETE_LIST_SIZE_XPATH); + } catch (XPathExpressionException | XPathFactoryConfigurationException e) { + throw new HarvesterException("Cannot compile xpath expression.", e); + } + try { + final Node resumptionTokenNode = (Node) expr + .evaluate(new InputSource(stream), XPathConstants.NODE); + if (resumptionTokenNode != null) { + final Node node = resumptionTokenNode.getAttributes().getNamedItem(COMPLETE_LIST_SIZE); + if (node != null) { + return Integer.valueOf(node.getNodeValue()); + } + } + } catch (NumberFormatException | XPathExpressionException e) { + LOGGER.debug("Cannot read completeListSize from OAI response ", e); + } + return null; + } + + /** + * Functional interface for record oai header post-processing. 
+ * + * @param the return value of the function + */ + @FunctionalInterface + public interface RecordOaiHeaderPostProcessing { + + /** + * Applies this function to the given arguments. + * + * @param oaiRecordHeader the oai record header + * @param oaiClient the oai client + * @param oaiHarvest the oai harvest + * @return the result of the function + * @throws HarvesterException if something went wrong + */ + O apply(OaiRecordHeader oaiRecordHeader, CloseableOaiClient oaiClient, OaiHarvest oaiHarvest) throws HarvesterException; + } + + private record SingleIteration(Predicate filter, + RecordOaiHeaderPostProcessing recordOaiHeaderPostProcessing, + ReportingIteration action, CloseableOaiClient oaiClient, OaiHarvest harvest) { + + /** + * Process provided header and return. + * + * @param header the header + * @return the iteration result indicating whether there is more items available. + * @throws HarvesterException if something went wrong + */ + public IterationResult process(Header header) throws HarvesterException { + final OaiRecordHeader oaiRecordHeader = OaiRecordHeader.convert(header); + if (filter.test(oaiRecordHeader)) { + final O postProcessResult = Optional.ofNullable(recordOaiHeaderPostProcessing.apply(oaiRecordHeader, oaiClient, harvest)) + .orElseThrow(() -> new HarvesterException("Post processing result cannot be null.")); + try { + return Optional.ofNullable(action.process(postProcessResult)) + .orElseThrow(() -> new HarvesterException("Iteration result cannot be null.")); + } catch (IOException e) { + throw new HarvesterException("Problem while processing: " + oaiRecordHeader.getOaiIdentifier(), e); + } + } + return IterationResult.CONTINUE; + } + } +} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecord.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecord.java index f6602c138e..207215af99 100644 --- 
a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecord.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecord.java @@ -1,17 +1,20 @@ package eu.europeana.metis.harvesting.oaipmh; -import eu.europeana.metis.harvesting.HarvesterException; +import eu.europeana.metis.harvesting.FullRecord; import java.io.ByteArrayInputStream; -import java.io.InputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.time.Instant; import java.util.function.Supplier; /** - * This is a immutable class representing an (OAI-embedded) record along with it's OAI header. + * This is a immutable class representing an (OAI-embedded) record along with it's OAI header. The + * harvesting identifier is the OAI identifier. */ -public class OaiRecord { +public class OaiRecord implements FullRecord { private final OaiRecordHeader header; - private final byte[] record; + private final byte[] content; /** * Constructor. @@ -21,23 +24,41 @@ public class OaiRecord { */ public OaiRecord(OaiRecordHeader header, Supplier recordSupplier) { this.header = header; - this.record = this.header.isDeleted() ? new byte[0] : recordSupplier.get(); + this.content = this.header.isDeleted() ? new byte[0] : recordSupplier.get(); } public OaiRecordHeader getHeader() { return header; } - /** - * Makes the embedded record available. - * - * @return An input stream containing the record. The caller needs to close it after use. - * @throws HarvesterException In case the record is marked as deleted. 
- */ - public InputStream getRecord() throws HarvesterException { - if (getHeader().isDeleted()) { - throw new HarvesterException("The record is deleted."); + @Override + public String getHarvestingIdentifier() { + return getHeader().getOaiIdentifier(); + } + + @Override + public void writeContent(OutputStream outputStream) throws IOException { + if (isDeleted()) { + throw new IllegalStateException("Record is deleted at source."); + } + outputStream.write(this.content); + } + + @Override + public ByteArrayInputStream getContent() { + if (isDeleted()) { + throw new IllegalStateException("Record is deleted at source."); } - return new ByteArrayInputStream(record); + return new ByteArrayInputStream(content); + } + + @Override + public boolean isDeleted() { + return getHeader().isDeleted(); + } + + @Override + public Instant getTimeStamp() { + return header.getDatestamp(); } } diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordHeaderIterator.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordHeaderIterator.java deleted file mode 100644 index b39cf1d016..0000000000 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordHeaderIterator.java +++ /dev/null @@ -1,45 +0,0 @@ -package eu.europeana.metis.harvesting.oaipmh; - -import eu.europeana.metis.harvesting.HarvesterException; -import eu.europeana.metis.harvesting.ReportingIteration; -import java.io.Closeable; -import java.util.function.Predicate; - -/** - * Implementations of this interface allow iterative access to records harvested using OAI-PMH. The - * iterator needs to be closed after use. - */ -public interface OaiRecordHeaderIterator extends Closeable { - - /** - * Iterate through the records while applying a filter (potentially skipping some records). - * - * @param action The iteration to perform. It needs to return a result. 
- * @param filter The filter to apply (only records that return true will be sent to the action). - * @throws HarvesterException In case there was a problem while harvesting. - */ - void forEachFiltered(ReportingIteration action, - Predicate filter) - throws HarvesterException; - - /** - * Iterate through all the records. - * - * @param action The iteration to perform. It needs to return a result. - * @throws HarvesterException In case there was a problem while harvesting. - */ - default void forEach(ReportingIteration action) throws HarvesterException { - forEachFiltered(action, header -> true); - } - - /** - * Iterate through all non-deleted records. - * - * @param action The iteration to perform. It needs to return a result. - * @throws HarvesterException In case there was a problem while harvesting. - */ - default void forEachNonDeleted(ReportingIteration action) - throws HarvesterException { - forEachFiltered(action, Predicate.not(OaiRecordHeader::isDeleted)); - } -} diff --git a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordParser.java b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordParser.java index 0483470aaf..b9dc5d8107 100644 --- a/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordParser.java +++ b/metis-harvesting/src/main/java/eu/europeana/metis/harvesting/oaipmh/OaiRecordParser.java @@ -154,7 +154,7 @@ private static boolean getAsBytes(byte[] oaiRecord, XPathExpression xPathExpress final TransformerFactory transformerFactory = new net.sf.saxon.TransformerFactoryImpl(); final Transformer transformer = transformerFactory.newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); - transformer.transform(nodes.get(0), outputTarget); + transformer.transform(nodes.getFirst(), outputTarget); return true; } } diff --git a/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiHarvesterImplTest.java 
b/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiHarvesterImplTest.java index 4a00691995..268903f0e8 100644 --- a/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiHarvesterImplTest.java +++ b/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiHarvesterImplTest.java @@ -53,7 +53,7 @@ void shouldHarvestRecord() throws IOException, HarvesterException { //when final InputStream result = harvester - .harvestRecord(new OaiRepository(OAI_PMH_ENDPOINT, "oai_dc"), recordId).getRecord(); + .harvestRecord(new OaiRepository(OAI_PMH_ENDPOINT, "oai_dc"), recordId).getContent(); //then final String actual = TestHelper.convertToString(result); @@ -74,7 +74,7 @@ void shouldHandleDeletedRecords() throws Exception { final OaiHarvesterImpl harvester = new OaiHarvesterImpl(CONNECTION_CLIENT_FACTORY); final OaiRecord oaiRecord = harvester.harvestRecord(new OaiRepository(OAI_PMH_ENDPOINT, "oai_dc"), recordId); - assertThrows(HarvesterException.class, oaiRecord::getRecord); + assertThrows(IllegalStateException.class, oaiRecord::getContent); } @Test diff --git a/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiRecordParserTest.java b/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiRecordParserTest.java index 0e709d52ec..e269dcfd92 100644 --- a/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiRecordParserTest.java +++ b/metis-harvesting/src/test/java/eu/europeana/metis/harvesting/OaiRecordParserTest.java @@ -38,7 +38,7 @@ void happyFlowExtantRecords() throws IOException, HarvesterException { assertEquals(LocalDateTime.of(1981, 7, 1, 0, 0).toInstant(ZoneOffset.UTC), result.getHeader().getDatestamp()); assertFalse(result.getHeader().isDeleted()); - final String actual = TestHelper.convertToString(result.getRecord()); + final String actual = TestHelper.convertToString(result.getContent()); assertThat(actual, TestHelper.isSimilarXml(WiremockHelper.getFileContent("/expectedOaiRecord.xml"))); } @@ -59,7 
+59,7 @@ void happyFlowDeletedRecords() throws IOException, HarvesterException { assertEquals("oai:mediateka.centrumzamenhofa.pl:20", result.getHeader().getOaiIdentifier()); assertEquals(LocalDateTime.of(2020, 2, 2, 12, 21).toInstant(ZoneOffset.UTC), result.getHeader().getDatestamp()); - assertThrows(HarvesterException.class, result::getRecord); + assertThrows(IllegalStateException.class, result::getContent); } @Test diff --git a/metis-indexing/pom.xml b/metis-indexing/pom.xml index 97d895ab14..e84d6c4f09 100644 --- a/metis-indexing/pom.xml +++ b/metis-indexing/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-indexing @@ -123,14 +123,4 @@ annotations - - - - - - build-helper-maven-plugin - org.codehaus.mojo - - - diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/AbstractConnectionProvider.java b/metis-indexing/src/main/java/eu/europeana/indexing/AbstractConnectionProvider.java index 326995107e..35ea48e645 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/AbstractConnectionProvider.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/AbstractConnectionProvider.java @@ -30,7 +30,7 @@ public interface AbstractConnectionProvider extends Closeable { * @return A publisher. */ default FullBeanPublisher getFullBeanPublisher(boolean preserveUpdateAndCreateTimesFromRdf) { - return new FullBeanPublisher(getRecordDao(), getRecordRedirectDao(), getSolrClient(), + return new FullBeanPublisher(getRecordDao(), getTombstoneRecordDao(), getRecordRedirectDao(), getSolrClient(), preserveUpdateAndCreateTimesFromRdf); } @@ -56,7 +56,7 @@ default void triggerFlushOfPendingChanges(boolean blockUntilComplete) * @return A dataset remover. 
*/ default IndexedRecordAccess getIndexedRecordAccess() { - return new IndexedRecordAccess(getRecordDao(), getSolrClient()); + return new IndexedRecordAccess(getRecordDao(), getTombstoneRecordDao(), getSolrClient()); } /** @@ -73,6 +73,13 @@ default IndexedRecordAccess getIndexedRecordAccess() { */ RecordDao getRecordDao(); + /** + * Provides a Mongo client object for connecting with the Mongo tombstone database. + * + * @return A Mongo client. + */ + RecordDao getTombstoneRecordDao(); + /** * Provides a Mongo redirect dao. * diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/ClientsConnectionProvider.java b/metis-indexing/src/main/java/eu/europeana/indexing/ClientsConnectionProvider.java index 8cfb0647df..7bd2b3d8c3 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/ClientsConnectionProvider.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/ClientsConnectionProvider.java @@ -1,45 +1,48 @@ package eu.europeana.indexing; import eu.europeana.metis.mongo.dao.RecordDao; -import eu.europeana.indexing.exception.SetupRelatedIndexingException; import eu.europeana.metis.mongo.dao.RecordRedirectDao; +import java.util.Objects; import org.apache.solr.client.solrj.SolrClient; /** - * This class is an implementation of {@link AbstractConnectionProvider} that sets up the connection - * using provided Solr and Mongo clients. Note: the caller is responsible for closing those - * connections. + * This class is an implementation of {@link AbstractConnectionProvider} that sets up the connection using provided Solr and Mongo + * clients. Note: the caller is responsible for closing those connections. 
* * @author jochen */ -final class ClientsConnectionProvider implements AbstractConnectionProvider { +record ClientsConnectionProvider(RecordDao recordDao, RecordDao tombstoneRecordDao, RecordRedirectDao recordRedirectDao, + SolrClient solrClient) implements AbstractConnectionProvider { - private final RecordDao edmMongoClient; - private final RecordRedirectDao recordRedirectDao; - private final SolrClient solrClient; + /** + * Constructor. + * + * @param recordDao The Mongo dao to be used. Cannot be null. + * @param tombstoneRecordDao The Mongo tombstone dao to be used. Cannot be null. + * @param recordRedirectDao The record redirect dao. + * @param solrClient The Solr client to be used. Cannot be null. + */ + public ClientsConnectionProvider { + Objects.requireNonNull(recordDao, "recordDao cannot be null"); + Objects.requireNonNull(solrClient, "solrClient cannot be null"); + } /** * Constructor. * - * @param edmMongoClient The Mongo client to be used. Cannot be null. + * @param recordDao The Mongo dao to be used. Cannot be null. * @param recordRedirectDao The record redirect dao. * @param solrClient The Solr client to be used. Cannot be null. - * @throws SetupRelatedIndexingException In case either of the two clients are null. 
*/ - ClientsConnectionProvider(RecordDao edmMongoClient, RecordRedirectDao recordRedirectDao, - SolrClient solrClient) - throws SetupRelatedIndexingException { - if (edmMongoClient == null) { - throw new SetupRelatedIndexingException("The provided Mongo client is null."); - } - if (solrClient == null) { - throw new SetupRelatedIndexingException("The provided Solr client is null."); - } - this.edmMongoClient = edmMongoClient; - this.recordRedirectDao = recordRedirectDao; - this.solrClient = solrClient; + public ClientsConnectionProvider(RecordDao recordDao, RecordRedirectDao recordRedirectDao, SolrClient solrClient) { + this(recordDao, null, recordRedirectDao, solrClient); } + @Override + public void close() { + // Nothing to do: the two clients are to be closed by the caller. + } + @Override public SolrClient getSolrClient() { return solrClient; @@ -47,16 +50,16 @@ public SolrClient getSolrClient() { @Override public RecordDao getRecordDao() { - return edmMongoClient; + return recordDao; } @Override - public RecordRedirectDao getRecordRedirectDao() { - return recordRedirectDao; + public RecordDao getTombstoneRecordDao() { + return tombstoneRecordDao; } @Override - public void close() { - // Nothing to do: the two clients are to be closed by the caller. 
+ public RecordRedirectDao getRecordRedirectDao() { + return recordRedirectDao; } } diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/FullBeanPublisher.java b/metis-indexing/src/main/java/eu/europeana/indexing/FullBeanPublisher.java index fa51fa80f5..185cabd42d 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/FullBeanPublisher.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/FullBeanPublisher.java @@ -33,7 +33,6 @@ import java.util.List; import java.util.Map; import java.util.function.Supplier; -import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -63,7 +62,8 @@ public class FullBeanPublisher { private final Supplier fullBeanConverterSupplier; - private final RecordDao edmMongoClient; + private final RecordDao recordDao; + private final RecordDao tombstoneRecordDao; private final SolrClient solrServer; private final boolean preserveUpdateAndCreateTimesFromRdf; private final RecordRedirectDao recordRedirectDao; @@ -71,22 +71,24 @@ public class FullBeanPublisher { /** * Constructor. * - * @param edmMongoClient The Mongo persistence. + * @param recordDao The Mongo persistence. + * @param tombstoneRecordDao The mongo tombstone persistence. * @param recordRedirectDao The record redirect dao * @param solrServer The searchable persistence. * @param preserveUpdateAndCreateTimesFromRdf This determines whether this publisher will use the updated and created times from * the incoming RDFs, or whether it computes its own. 
*/ - FullBeanPublisher(RecordDao edmMongoClient, RecordRedirectDao recordRedirectDao, + FullBeanPublisher(RecordDao recordDao, RecordDao tombstoneRecordDao, RecordRedirectDao recordRedirectDao, SolrClient solrServer, boolean preserveUpdateAndCreateTimesFromRdf) { - this(edmMongoClient, recordRedirectDao, solrServer, preserveUpdateAndCreateTimesFromRdf, + this(recordDao, tombstoneRecordDao, recordRedirectDao, solrServer, preserveUpdateAndCreateTimesFromRdf, RdfToFullBeanConverter::new); } /** * Constructor for testing purposes. * - * @param edmMongoClient The Mongo persistence. + * @param recordDao The Mongo persistence. + * @param tombstoneRecordDao The Mongo persistence. * @param recordRedirectDao The record redirect dao * @param solrServer The searchable persistence. * @param preserveUpdateAndCreateTimesFromRdf This determines whether this publisher will use the updated and created times from @@ -94,14 +96,15 @@ public class FullBeanPublisher { * @param fullBeanConverterSupplier Supplies an instance of {@link RdfToFullBeanConverter} used to parse strings to instances of * {@link FullBeanImpl}. Will be called once during every publish. 
*/ - FullBeanPublisher(RecordDao edmMongoClient, RecordRedirectDao recordRedirectDao, + FullBeanPublisher(RecordDao recordDao, RecordDao tombstoneRecordDao, RecordRedirectDao recordRedirectDao, SolrClient solrServer, boolean preserveUpdateAndCreateTimesFromRdf, Supplier fullBeanConverterSupplier) { - this.edmMongoClient = edmMongoClient; + this.recordDao = recordDao; + this.tombstoneRecordDao = tombstoneRecordDao; + this.recordRedirectDao = recordRedirectDao; this.solrServer = solrServer; this.fullBeanConverterSupplier = fullBeanConverterSupplier; this.preserveUpdateAndCreateTimesFromRdf = preserveUpdateAndCreateTimesFromRdf; - this.recordRedirectDao = recordRedirectDao; } private static void setUpdateAndCreateTime(IdBean current, FullBean updated, @@ -181,7 +184,7 @@ private void publish(RdfWrapper rdf, Date recordDate, List datasetIdsToR final List> recordsForRedirection = performRedirection(rdf, recordDate, datasetIdsToRedirectFrom, performRedirects); - final FullBeanImpl savedFullBean = publishToMongo(recordDate, fullBean, fullBeanPreprocessor, + final FullBeanImpl savedFullBean = publishToRecordMongo(recordDate, fullBean, fullBeanPreprocessor, recordsForRedirection); publishToSolrFinal(rdf, savedFullBean); @@ -205,13 +208,30 @@ public void publishMongo(RdfWrapper rdf, Date recordDate) throws IndexingExcepti final TriConsumer> fullBeanPreprocessor = providePreprocessor(); - publishToMongo(recordDate, fullBean, fullBeanPreprocessor, Collections.emptyList()); + publishToRecordMongo(recordDate, fullBean, fullBeanPreprocessor, Collections.emptyList()); + } + + /** + * Publishes an RDF only to tombstone mongo. + * @param fullBean Fullbean to publish. + * @param recordDate the data that would represent the created/updated date of a record + * @throws IndexingException which can be one of: + *
    + *
  • {@link IndexerRelatedIndexingException} In case an error occurred during publication.
  • + *
  • {@link SetupRelatedIndexingException} in case an error occurred during indexing setup
  • + *
  • {@link RecordRelatedIndexingException} in case an error occurred related to record + * contents
  • + *
+ */ + public void publishTombstone(FullBeanImpl fullBean, Date recordDate) throws IndexingException { + final TriConsumer> fullBeanPreprocessor = providePreprocessor(); + publishToTombstoneMongo(recordDate, fullBean, fullBeanPreprocessor, Collections.emptyList()); } /** * Publishes an RDF to solr server * - * @param rdf RDF to publish. + * @param rdfWrapper RDF to publish. * @param recordDate The date that would represent the created/updated date of a record * @throws IndexingException which can be one of: *
    @@ -221,26 +241,26 @@ public void publishMongo(RdfWrapper rdf, Date recordDate) throws IndexingExcepti * contents *
*/ - public void publishSolr(RdfWrapper rdf, Date recordDate) throws IndexingException { - final FullBeanImpl fullBean = convertRDFToFullBean(rdf); + public void publishSolr(RdfWrapper rdfWrapper, Date recordDate) throws IndexingException { + final FullBeanImpl fullBean = convertRDFToFullBean(rdfWrapper); if (!preserveUpdateAndCreateTimesFromRdf) { Date createdDate; - if (rdf.getAbout() == null) { + if (rdfWrapper.getAbout() == null) { createdDate = recordDate; } else { - final String solrQuery = String.format("%s:\"%s\"", EdmLabel.EUROPEANA_ID, ClientUtils.escapeQueryChars(rdf.getAbout())); + final String solrQuery = String.format("%s:\"%s\"", EdmLabel.EUROPEANA_ID, + ClientUtils.escapeQueryChars(rdfWrapper.getAbout())); final Map queryParamMap = new HashMap<>(); queryParamMap.put("q", solrQuery); queryParamMap.put("fl", EdmLabel.TIMESTAMP_CREATED + "," + EdmLabel.EUROPEANA_ID); SolrDocumentList solrDocuments = getExistingDocuments(queryParamMap); createdDate = (Date) solrDocuments.stream() .map(document -> document.getFieldValue(EdmLabel.TIMESTAMP_CREATED.toString())) - .collect(Collectors.toList()) - .stream().findFirst().orElse(recordDate); + .toList().stream().findFirst().orElse(recordDate); } setUpdateAndCreateTime(null, fullBean, Pair.of(recordDate, createdDate)); } - publishToSolrFinal(rdf, fullBean); + publishToSolrFinal(rdfWrapper, fullBean); } private SolrDocumentList getExistingDocuments(Map queryParamMap) @@ -278,16 +298,29 @@ private void publishToSolrFinal(RdfWrapper rdf, FullBeanImpl savedFullBean) thro } } - private FullBeanImpl publishToMongo(Date recordDate, FullBeanImpl fullBean, + private FullBeanImpl publishToRecordMongo(Date recordDate, FullBeanImpl fullBean, TriConsumer> fullBeanPreprocessor, List> recordsForRedirection) throws SetupRelatedIndexingException, IndexerRelatedIndexingException, RecordRelatedIndexingException { - // Publish to Mongo + return publishToMongo(recordDate, fullBean, fullBeanPreprocessor, recordsForRedirection, 
recordDao); + } + + private FullBeanImpl publishToTombstoneMongo(Date recordDate, FullBeanImpl fullBean, + TriConsumer> fullBeanPreprocessor, + List> recordsForRedirection) + throws SetupRelatedIndexingException, IndexerRelatedIndexingException, RecordRelatedIndexingException { + return publishToMongo(recordDate, fullBean, fullBeanPreprocessor, recordsForRedirection, tombstoneRecordDao); + } + + private FullBeanImpl publishToMongo(Date recordDate, FullBeanImpl fullBean, + TriConsumer> fullBeanPreprocessor, + List> recordsForRedirection, RecordDao tombstoneRecordDao) + throws SetupRelatedIndexingException, IndexerRelatedIndexingException, RecordRelatedIndexingException { final FullBeanImpl savedFullBean; try { savedFullBean = new FullBeanUpdater(fullBeanPreprocessor).update(fullBean, recordDate, recordsForRedirection.stream().map(Pair::getValue).min(Comparator.naturalOrder()) - .orElse(null), edmMongoClient); + .orElse(null), tombstoneRecordDao); } catch (MongoIncompatibleDriverException | MongoConfigurationException | MongoSecurityException e) { throw new SetupRelatedIndexingException(MONGO_SERVER_PUBLISH_ERROR, e); } catch (MongoSocketException | MongoClientException | MongoInternalException | MongoInterruptedException e) { @@ -318,13 +351,14 @@ private FullBeanImpl convertRDFToFullBean(RdfWrapper rdf) { return fullBeanConverter.convertRdfToFullBean(rdf); } - private void publishToSolr(RdfWrapper rdf, FullBeanImpl fullBean) throws IndexingException { + private void publishToSolr(RdfWrapper rdfWrapper, FullBeanImpl fullBean) throws IndexingException { // Create Solr document. 
final SolrDocumentPopulator documentPopulator = new SolrDocumentPopulator(); final SolrInputDocument document = new SolrInputDocument(); documentPopulator.populateWithProperties(document, fullBean); - documentPopulator.populateWithFacets(document, rdf); + documentPopulator.populateWithFacets(document, rdfWrapper); + documentPopulator.populateWithDateRanges(document, rdfWrapper); // Save Solr document. try { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/IndexedRecordAccess.java b/metis-indexing/src/main/java/eu/europeana/indexing/IndexedRecordAccess.java index 870461f0f8..0445cf6b55 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/IndexedRecordAccess.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/IndexedRecordAccess.java @@ -26,33 +26,34 @@ import org.apache.solr.client.solrj.util.ClientUtils; /** - * This class provides functionality for accessing records that are already indexed from the Mongo - * and the Solr data stores. Note that this class does NOT contain functionality for indexing - * records. + * This class provides functionality for accessing records that are already indexed from the Mongo and the Solr data stores. Note + * that this class does NOT contain functionality for indexing records. */ public class IndexedRecordAccess { private static final String ID_FIELD = "_id"; private static final String ABOUT_FIELD = "about"; - private final RecordDao mongoServer; + private final RecordDao recordDao; + private final RecordDao tombstoneDao; private final SolrClient solrServer; /** * Constructor. * - * @param mongoServer The Mongo server connection. + * @param recordDao the mongo dao for connecting with the Mongo records database. + * @param tombstoneDao the mongo dao for connecting with the Mongo tombstone records database. * @param solrServer The Solr server connection. 
*/ - IndexedRecordAccess(RecordDao mongoServer, SolrClient solrServer) { - this.mongoServer = mongoServer; + IndexedRecordAccess(RecordDao recordDao, RecordDao tombstoneDao, SolrClient solrServer) { + this.recordDao = recordDao; + this.tombstoneDao = tombstoneDao; this.solrServer = solrServer; } /** - * Counts the records in a given dataset. The criteria of whether a record belongs to a certain - * dataset is the same as that used in the method {@link #removeDataset(String, Date)}, i.e. it is - * based on the rdf:about values. + * Counts the records in a given dataset. The criteria of whether a record belongs to a certain dataset is the same as that used + * in the method {@link #removeDataset(String, Date)}, i.e. it is based on the rdf:about values. * * @param datasetId The ID of the dataset of which to count the records. Is not null. * @return The number of records encountered for the given dataset. @@ -62,9 +63,28 @@ public long countRecords(String datasetId) { } /** - * Removes the record with the given rdf:about value. Also removes any associated entities (i.e. - * those entities that are always part of only one record and the removal of which can not - * invalidate references from other records): + * Get fullbean from database given an rdf about. + * @param rdfAbout the rdf about + * @return the fullbean + */ + public FullBeanImpl getFullbean(String rdfAbout) { + final Datastore datastore = recordDao.getDatastore(); + return datastore.find(FullBeanImpl.class).filter(Filters.eq(ABOUT_FIELD, rdfAbout)).first(); + } + + /** + * Get fullbean from tombstone database given an rdf about. + * @param rdfAbout the rdf about + * @return the fullbean + */ + public FullBeanImpl getTombstoneFullbean(String rdfAbout) { + final Datastore datastore = tombstoneDao.getDatastore(); + return datastore.find(FullBeanImpl.class).filter(Filters.eq(ABOUT_FIELD, rdfAbout)).first(); + } + + /** + * Removes the record with the given rdf:about value. 
Also removes any associated entities (i.e. those entities that are always + * part of only one record and the removal of which can not invalidate references from other records): *
    *
  • Aggregation
  • *
  • EuropeanaAggregation
  • @@ -75,7 +95,7 @@ public long countRecords(String datasetId) { * not removed. * * @param rdfAbout The about value of the record to remove. Is not null. - * @return Whether or not the record was removed. + * @return Whether the record was removed. * @throws IndexerRelatedIndexingException In case something went wrong. */ public boolean removeRecord(String rdfAbout) throws IndexerRelatedIndexingException { @@ -83,12 +103,12 @@ public boolean removeRecord(String rdfAbout) throws IndexerRelatedIndexingExcept // Remove Solr record final String queryValue = ClientUtils.escapeQueryChars(rdfAbout); - solrServer.deleteByQuery(EdmLabel.EUROPEANA_ID.toString() + ":" + queryValue); + solrServer.deleteByQuery(EdmLabel.EUROPEANA_ID + ":" + queryValue); // Obtain the Mongo record - final Datastore datastore = mongoServer.getDatastore(); + final Datastore datastore = recordDao.getDatastore(); final FullBeanImpl recordToDelete = datastore.find(FullBeanImpl.class) - .filter(Filters.eq(ABOUT_FIELD, rdfAbout)).first(); + .filter(Filters.eq(ABOUT_FIELD, rdfAbout)).first(); // Remove mongo record and dependencies if (recordToDelete != null) { @@ -111,14 +131,13 @@ public boolean removeRecord(String rdfAbout) throws IndexerRelatedIndexingExcept *

    Removes all records that belong to a given dataset. For details on what parts of the record * are removed, see the documentation of {@link #removeRecord(String)}.

    *

    NOTE that the rdf:about is - * used to find the dependencies, rather than the actual references in the records. While this is - * a reasonably safe way to go for now, eventually a more generic way along the lines of {@link - * #removeRecord(String)} should be found, in which the exact composition of the rdf:about is - * taken out of the equation.

    + * used to find the dependencies, rather than the actual references in the records. While this is a reasonably safe way to go + * for now, eventually a more generic way along the lines of {@link #removeRecord(String)} should be found, in which the exact + * composition of the rdf:about is taken out of the equation.

    * * @param datasetId The ID of the dataset to clear. Is not null. - * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this - * date will be removed. If null is provided then all records from that dataset will be removed. + * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this date will be removed. If null + * is provided then all records from that dataset will be removed. * @return The number of records that were removed. * @throws IndexerRelatedIndexingException In case something went wrong. */ @@ -136,31 +155,31 @@ public long removeDataset(String datasetId, Date maxRecordDate) } /** - * Return all record IDs that belong to the given dataset. For implementation details see {@link - * #removeDataset(String, Date)} as the selection is to be performed analogously. + * Return all record IDs that belong to the given dataset. For implementation details see {@link #removeDataset(String, Date)} + * as the selection is to be performed analogously. * * @param datasetId The ID of the dataset to search. Is not null. - * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this - * date will be included. If null is provided then all records from that dataset are included. + * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this date will be included. If null + * is provided then all records from that dataset are included. * @return The record IDs in a stream. 
*/ public Stream getRecordIds(String datasetId, Date maxRecordDate) { final FindOptions findOptions = new FindOptions() - .projection().exclude(ID_FIELD) - .projection().include(ABOUT_FIELD); + .projection().exclude(ID_FIELD) + .projection().include(ABOUT_FIELD); final Iterator resultIterator = createMongoQuery(datasetId, maxRecordDate) - .iterator(findOptions); + .iterator(findOptions); return StreamSupport.stream(Spliterators.spliteratorUnknownSize(resultIterator, 0), false) - .map(FullBeanImpl::getAbout); + .map(FullBeanImpl::getAbout); } /** - * Count all records that belong to the given dataset. For implementation details see {@link - * #removeDataset(String, Date)} as the selection is to be performed analogously. + * Count all records that belong to the given dataset. For implementation details see {@link #removeDataset(String, Date)} as + * the selection is to be performed analogously. * * @param datasetId The ID of the dataset to search. Is not null. - * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this - * date will be counted. If null is provided then all records from that dataset will be counted. + * @param maxRecordDate The cutoff date: all records that have a lower timestampUpdated than this date will be counted. If null + * is provided then all records from that dataset will be counted. * @return The record IDs in a stream. 
*/ public long countRecords(String datasetId, Date maxRecordDate) { @@ -180,19 +199,19 @@ private void removeDatasetFromSolr(String datasetId, Date maxRecordDate) DateFormat dateFormat = new SimpleDateFormat(CommonStringValues.DATE_FORMAT_Z, Locale.US); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); solrQuery.append(" AND ").append(EdmLabel.TIMESTAMP_UPDATED).append(":[* TO ") - .append(dateFormat.format(maxRecordDate)).append('}'); + .append(dateFormat.format(maxRecordDate)).append('}'); } solrServer.deleteByQuery(solrQuery.toString()); } private long removeDatasetFromMongo(String datasetId, Date maxRecordDate) { return createMongoQuery(datasetId, maxRecordDate).delete(new DeleteOptions().multi(true)) - .getDeletedCount(); + .getDeletedCount(); } private Query createMongoQuery(String datasetId, Date maxRecordDate) { final Pattern pattern = Pattern.compile("^" + Pattern.quote(getRecordIdPrefix(datasetId))); - final Query query = mongoServer.getDatastore().find(FullBeanImpl.class); + final Query query = recordDao.getDatastore().find(FullBeanImpl.class); query.filter(Filters.regex(ABOUT_FIELD).pattern(pattern)); if (maxRecordDate != null) { query.filter(Filters.lt("timestampUpdated", maxRecordDate)); diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/Indexer.java b/metis-indexing/src/main/java/eu/europeana/indexing/Indexer.java index 5d3f0664dd..f729630772 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/Indexer.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/Indexer.java @@ -1,5 +1,7 @@ package eu.europeana.indexing; +import eu.europeana.corelib.solr.bean.impl.FullBeanImpl; +import eu.europeana.metis.utils.DepublicationReason; import java.io.Closeable; import java.io.InputStream; import java.util.Date; @@ -28,18 +30,18 @@ public interface Indexer extends Closeable { /** *

    - * This method indexes a single record, publishing it to the provided data stores. + * This method indexes a single rdf, publishing it to the provided data stores. *

    *

    * NOTE: this operation should not coincide with a remove operation as this operation is * not done within a transaction. *

    * - * @param record The record to index. + * @param rdf The rdf to index. * @param indexingProperties The properties of this indexing operation. * @throws IndexingException In case a problem occurred during indexing. */ - void indexRdf(RDF record, IndexingProperties indexingProperties) throws IndexingException; + void indexRdf(RDF rdf, IndexingProperties indexingProperties) throws IndexingException; /** *

    @@ -58,18 +60,18 @@ public interface Indexer extends Closeable { /** *

    - * This method indexes a single record, publishing it to the provided data stores. + * This method indexes a single rdfString, publishing it to the provided data stores. *

    *

    * NOTE: this operation should not coincide with a remove operation as this operation is * not done within a transaction. *

    * - * @param record The record to index (can be parsed to RDF). + * @param rdfString The rdfString to index (can be parsed to RDF). * @param indexingProperties The properties of this indexing operation. * @throws IndexingException In case a problem occurred during indexing. */ - void index(String record, IndexingProperties indexingProperties) throws IndexingException; + void index(String rdfString, IndexingProperties indexingProperties) throws IndexingException; /** *

    @@ -82,7 +84,10 @@ public interface Indexer extends Closeable { * * @param stringRdfRecord The record to index (can be parsed to RDF). * @param indexingProperties The properties of this indexing operation. - * @param tierResultsConsumer The predicate deciding if the record should be published based on evaluated tier. + * @param tierResultsConsumer The predicate deciding whether the record should be published based + * on the evaluated tier. Note: the tier calculations that are provided + * to the consumer are for provider data only (i.e. mode + * {@link eu.europeana.indexing.tiers.metadata.ClassifierMode#PROVIDER_PROXIES}). * @throws IndexingException In case a problem occurred during indexing. */ void index(String stringRdfRecord, IndexingProperties indexingProperties, @@ -105,18 +110,18 @@ void index(String stringRdfRecord, IndexingProperties indexingProperties, /** *

    - * This method indexes a single record, publishing it to the provided data stores. + * This method indexes a single rdfInputStream, publishing it to the provided data stores. *

    *

    * NOTE: this operation should not coincide with a remove operation as this operation is * not done within a transaction. *

    * - * @param record The record to index (can be parsed to RDF). + * @param rdfInputStream The rdfInputStream to index (can be parsed to RDF). * @param indexingProperties The properties of this indexing operation. * @throws IndexingException In case a problem occurred during indexing. */ - void index(InputStream record, IndexingProperties indexingProperties) throws IndexingException; + void index(InputStream rdfInputStream, IndexingProperties indexingProperties) throws IndexingException; /** *

    @@ -131,7 +136,9 @@ void index(String stringRdfRecord, IndexingProperties indexingProperties, * @param recordContent The record to index (can be parsed to RDF). * @param indexingProperties The properties of this indexing operation. * @throws IndexingException In case a problem occurred during indexing. - * @return A pair with both content tier and metadata tier calculations results of the given record + * @return A pair with both content tier and metadata tier calculations results of the given + * record. The tier calculations are for provider data only (i.e. mode + * {@link eu.europeana.indexing.tiers.metadata.ClassifierMode#PROVIDER_PROXIES}). */ TierResults indexAndGetTierCalculations(InputStream recordContent, IndexingProperties indexingProperties) throws IndexingException; @@ -174,6 +181,23 @@ TierResults indexAndGetTierCalculations(InputStream recordContent, */ boolean remove(String rdfAbout) throws IndexingException; + /** + * Get a tombstone record given an rdf about. + * @param rdfAbout the rdf about + * @return the tombstone record or else null + */ + FullBeanImpl getTombstone(String rdfAbout); + + /** + * Creates and indexes a tombstone record. + * + * @param rdfAbout the id of the record + * @param depublicationReason the depublication reason + * @return whether a record was tombstoned + * @throws IndexingException in case something went wrong. + */ + boolean indexTombstone(String rdfAbout, DepublicationReason depublicationReason) throws IndexingException; + /** *

    * Removes all records that belong to a given dataset. This method also removes the associated diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerFactory.java b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerFactory.java index e1bf6ca270..4c3d566d77 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerFactory.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerFactory.java @@ -27,13 +27,27 @@ public IndexerFactory(IndexingSettings settings) { * caller is responsible for closing the clients. Any indexers created through the {@link * #getIndexer()} method will then no longer work and no new ones can be created. * - * @param mongoClient The Mongo client to use. + * @param recordDao The Mongo dao to use. * @param recordRedirectDao The record redirect dao * @param solrClient The Solr client to use. */ - public IndexerFactory(RecordDao mongoClient, RecordRedirectDao recordRedirectDao, + public IndexerFactory(RecordDao recordDao, RecordRedirectDao recordRedirectDao, SolrClient solrClient) { + this(() -> new ClientsConnectionProvider(recordDao, recordRedirectDao, solrClient)); + } + + /** + * Constructor for setting up a factory using already existing Mongo and Solr clients. Note: the + * caller is responsible for closing the clients. Any indexers created through the {@link + * #getIndexer()} method will then no longer work and no new ones can be created. + * + * @param recordDao The Mongo dao to use. + * @param tombstoneRecordDao The Mongo tombstone dao to use. + * @param recordRedirectDao The record redirect dao + * @param solrClient The Solr client to use. 
+ */ + public IndexerFactory(RecordDao recordDao, RecordDao tombstoneRecordDao, RecordRedirectDao recordRedirectDao, SolrClient solrClient) { - this(() -> new ClientsConnectionProvider(mongoClient, recordRedirectDao, solrClient)); + this(() -> new ClientsConnectionProvider(recordDao, tombstoneRecordDao, recordRedirectDao, solrClient)); } /** @@ -60,7 +74,11 @@ public IndexerFactory(IndexerConnectionSupplier connectionProviderSupplier) { */ public Indexer getIndexer() throws SetupRelatedIndexingException, IndexerRelatedIndexingException { - return new IndexerImpl(connectionProviderSupplier.get()); + try { + return new IndexerImpl(connectionProviderSupplier.get()); + } catch (IllegalArgumentException e) { + throw new SetupRelatedIndexingException("Creating a connection from the supplier failed.", e); + } } /** diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerImpl.java b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerImpl.java index 5e2aa86815..af28c0f7ca 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerImpl.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerImpl.java @@ -1,5 +1,15 @@ package eu.europeana.indexing; +import static java.lang.String.format; +import static java.util.function.Predicate.not; + +import eu.europeana.corelib.definitions.edm.entity.ChangeLog; +import eu.europeana.corelib.definitions.edm.entity.EuropeanaAggregation; +import eu.europeana.corelib.solr.bean.impl.FullBeanImpl; +import eu.europeana.corelib.solr.entity.AggregationImpl; +import eu.europeana.corelib.solr.entity.ChangeLogImpl; +import eu.europeana.corelib.solr.entity.EuropeanaAggregationImpl; +import eu.europeana.corelib.solr.entity.ProxyImpl; import eu.europeana.indexing.exception.IndexerRelatedIndexingException; import eu.europeana.indexing.exception.IndexingException; import eu.europeana.indexing.exception.SetupRelatedIndexingException; @@ -7,22 +17,23 @@ import eu.europeana.indexing.tiers.model.TierResults; 
import eu.europeana.indexing.utils.RdfWrapper; import eu.europeana.metis.schema.jibx.RDF; +import eu.europeana.metis.utils.DepublicationReason; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.Optional; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Stream; import org.apache.solr.client.solrj.SolrServerException; +import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Implementation of {@link Indexer}. - * - * @author jochen */ public class IndexerImpl implements Indexer { @@ -74,10 +85,10 @@ public void index(List records, IndexingProperties indexingProperties) } @Override - public void index(InputStream recordInputStream, IndexingProperties indexingProperties) + public void index(InputStream rdfInputStream, IndexingProperties indexingProperties) throws IndexingException { final StringToFullBeanConverter stringToRdfConverter = stringToRdfConverterSupplier.get(); - indexRdf(stringToRdfConverter.convertToRdf(recordInputStream), indexingProperties); + indexRdf(stringToRdfConverter.convertToRdf(rdfInputStream), indexingProperties); } @Override @@ -86,17 +97,17 @@ public TierResults indexAndGetTierCalculations(InputStream recordContent, final RDF rdfRecord = stringToRdfConverterSupplier.get().convertToRdf(recordContent); final List result = new ArrayList<>(); indexRecords(List.of(rdfRecord), indexingProperties, result::add); - return result.get(0); + return result.getFirst(); } @Override - public void indexRdf(RDF rdfRecord, IndexingProperties indexingProperties) throws IndexingException { - indexRdfs(List.of(rdfRecord), indexingProperties); + public void indexRdf(RDF rdf, IndexingProperties indexingProperties) throws IndexingException { + indexRdfs(List.of(rdf), indexingProperties); } @Override - public void index(String stringRdfRecord, IndexingProperties 
indexingProperties) throws IndexingException { - index(List.of(stringRdfRecord), indexingProperties); + public void index(String rdfString, IndexingProperties indexingProperties) throws IndexingException { + index(List.of(rdfString), indexingProperties); } @Override @@ -126,6 +137,86 @@ public boolean remove(String rdfAbout) throws IndexerRelatedIndexingException { return this.connectionProvider.getIndexedRecordAccess().removeRecord(rdfAbout); } + @Override + public FullBeanImpl getTombstone(String rdfAbout) { + return this.connectionProvider.getIndexedRecordAccess().getTombstoneFullbean(rdfAbout); + } + + @Override + public boolean indexTombstone(String rdfAbout, DepublicationReason depublicationReason) throws IndexingException { + if (depublicationReason == DepublicationReason.LEGACY) { + throw new IndexerRelatedIndexingException( + format("Depublication reason %s, is not allowed", depublicationReason)); + } + final FullBeanImpl publishedFullbean = this.connectionProvider.getIndexedRecordAccess().getFullbean(rdfAbout); + if (publishedFullbean != null) { + final FullBeanPublisher publisher = connectionProvider.getFullBeanPublisher(true); + final FullBeanImpl tombstoneFullbean = prepareTombstoneFullbean(publishedFullbean, depublicationReason); + try { + publisher.publishTombstone(tombstoneFullbean, tombstoneFullbean.getTimestampCreated()); + } catch (IndexingException e) { + throw new IndexerRelatedIndexingException("Could not create tombstone record '" + rdfAbout + "'.", e); + } + } + return publishedFullbean != null; + } + + private FullBeanImpl prepareTombstoneFullbean(FullBeanImpl publishedFullbean, DepublicationReason depublicationReason) { + final FullBeanImpl tombstoneFullbean = new FullBeanImpl(); + tombstoneFullbean.setAbout(publishedFullbean.getAbout()); + tombstoneFullbean.setTimestampCreated(publishedFullbean.getTimestampCreated()); + tombstoneFullbean.setTimestampUpdated(publishedFullbean.getTimestampUpdated()); + + 
tombstoneFullbean.setEuropeanaAggregation( + prepareEuropeanaAggregation(publishedFullbean.getEuropeanaAggregation(), depublicationReason)); + tombstoneFullbean.setAggregations(List.of(prepareAggregation(publishedFullbean.getAggregations().getFirst()))); + final Optional providerProxy = + publishedFullbean.getProxies().stream().filter(not(ProxyImpl::isEuropeanaProxy)).findFirst(); + providerProxy.ifPresent(proxy -> tombstoneFullbean.setProxies(List.of(prepareProxy(proxy)))); + return tombstoneFullbean; + } + + private static EuropeanaAggregation prepareEuropeanaAggregation(EuropeanaAggregation europeanaAggregation, + DepublicationReason depublicationReason) { + + final ChangeLog tombstoneChangeLog = new ChangeLogImpl(); + tombstoneChangeLog.setType("Delete"); + tombstoneChangeLog.setContext(depublicationReason.getUrl()); + tombstoneChangeLog.setEndTime(new Date()); + final EuropeanaAggregation tombstoneEuropeanaAggregation = new EuropeanaAggregationImpl(); + tombstoneEuropeanaAggregation.setAbout(europeanaAggregation.getAbout()); + tombstoneEuropeanaAggregation.setChangeLog(List.of(tombstoneChangeLog)); + tombstoneEuropeanaAggregation.setEdmPreview(europeanaAggregation.getEdmPreview()); + return tombstoneEuropeanaAggregation; + } + + private static @NotNull AggregationImpl prepareAggregation(AggregationImpl aggregation) { + final AggregationImpl tombstoneAggregation = new AggregationImpl(); + tombstoneAggregation.setAbout(aggregation.getAbout()); + tombstoneAggregation.setEdmDataProvider(aggregation.getEdmDataProvider()); + tombstoneAggregation.setEdmProvider(aggregation.getEdmProvider()); + tombstoneAggregation.setEdmObject(aggregation.getEdmObject()); + tombstoneAggregation.setEdmIntermediateProvider(aggregation.getEdmIntermediateProvider()); + tombstoneAggregation.setEdmIsShownAt(aggregation.getEdmIsShownAt()); + tombstoneAggregation.setEdmIsShownBy(aggregation.getEdmIsShownBy()); + return tombstoneAggregation; + } + + private static @NotNull ProxyImpl 
prepareProxy(ProxyImpl providerProxy) { + final ProxyImpl tombstoneProviderProxy = new ProxyImpl(); + tombstoneProviderProxy.setAbout(providerProxy.getAbout()); + tombstoneProviderProxy.setEuropeanaProxy(false); + tombstoneProviderProxy.setDcTitle(providerProxy.getDcTitle()); + tombstoneProviderProxy.setDcDescription(providerProxy.getDcDescription()); + tombstoneProviderProxy.setDcIdentifier(providerProxy.getDcIdentifier()); + tombstoneProviderProxy.setDcCreator(providerProxy.getDcCreator()); + tombstoneProviderProxy.setDcContributor(providerProxy.getDcContributor()); + tombstoneProviderProxy.setEdmRights(providerProxy.getEdmRights()); + tombstoneProviderProxy.setDcRights(providerProxy.getDcRights()); + tombstoneProviderProxy.setDctermsIsReferencedBy(providerProxy.getDctermsIsReferencedBy()); + return tombstoneProviderProxy; + } + @Override public int removeAll(String datasetId, Date maxRecordDate) throws IndexerRelatedIndexingException { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerPreprocessor.java b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerPreprocessor.java index 4ba8e00d2d..97b23604e7 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/IndexerPreprocessor.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/IndexerPreprocessor.java @@ -16,7 +16,7 @@ /** * The type Indexer preprocessor. */ -public class IndexerPreprocessor { +public final class IndexerPreprocessor { private static final TierClassifier mediaClassifier = ClassifierFactory.getMediaClassifier(); @@ -33,7 +33,7 @@ private IndexerPreprocessor() { * * @param rdf the rdf * @param properties the properties - * @return the tier results + * @return the tier results for the provider data (i.e. {@link ClassifierMode#PROVIDER_PROXIES}). 
* @throws IndexingException the indexing exception */ public static TierResults preprocessRecord(RDF rdf, IndexingProperties properties) @@ -41,21 +41,21 @@ public static TierResults preprocessRecord(RDF rdf, IndexingProperties propertie // Perform the tier classification final RdfWrapper rdfWrapper = new RdfWrapper(rdf); - TierResults tierCalculationsResult = new TierResults(); if (properties.isPerformTierCalculation() && properties.getTypesEnabledForTierCalculation() .contains(rdfWrapper.getEdmType())) { - tierCalculationsResult = new TierResults(mediaClassifier.classify(rdfWrapper), - metadataClassifier.classify(rdfWrapper)); - RdfTierUtils.setTier(rdf, tierCalculationsResult.getMediaTier()); - RdfTierUtils.setTier(rdf, tierCalculationsResult.getMetadataTier()); - - tierCalculationsResult = new TierResults(mediaClassifier.classify(rdfWrapper), - metadataClassifierEuropeana.classify(rdfWrapper)); - RdfTierUtils.setTierEuropeana(rdf, tierCalculationsResult.getMediaTier()); - RdfTierUtils.setTierEuropeana(rdf, tierCalculationsResult.getMetadataTier()); + final TierResults tierCalculationsResultProvidedData = new TierResults( + mediaClassifier.classify(rdfWrapper), metadataClassifier.classify(rdfWrapper)); + RdfTierUtils.setTier(rdf, tierCalculationsResultProvidedData.getMediaTier()); + RdfTierUtils.setTier(rdf, tierCalculationsResultProvidedData.getMetadataTier()); + + final TierResults tierCalculationsResultEuropeana = new TierResults( + mediaClassifier.classify(rdfWrapper), metadataClassifierEuropeana.classify(rdfWrapper)); + RdfTierUtils.setTierEuropeana(rdf, tierCalculationsResultEuropeana.getMediaTier()); + RdfTierUtils.setTierEuropeana(rdf, tierCalculationsResultEuropeana.getMetadataTier()); + + return tierCalculationsResultProvidedData; } - return tierCalculationsResult; + return new TierResults(); } - } diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/IndexingSettings.java 
b/metis-indexing/src/main/java/eu/europeana/indexing/IndexingSettings.java index 3eb4a4cb9c..bf3db0d192 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/IndexingSettings.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/IndexingSettings.java @@ -1,7 +1,6 @@ package eu.europeana.indexing; import static eu.europeana.indexing.utils.IndexingSettingsUtils.nonNullFieldName; -import static eu.europeana.indexing.utils.IndexingSettingsUtils.nonNullMessage; import com.mongodb.ServerAddress; import eu.europeana.indexing.exception.SetupRelatedIndexingException; @@ -22,6 +21,7 @@ public final class IndexingSettings { // Mongo settings private String mongoDatabaseName; + private String mongoTombstoneDatabaseName; private String recordRedirectDatabaseName; private final MongoProperties mongoProperties = new MongoProperties<>( SetupRelatedIndexingException::new); @@ -50,6 +50,10 @@ public void setMongoDatabaseName(String mongoDatabaseName) throws SetupRelatedIn this.mongoDatabaseName = nonNullFieldName(mongoDatabaseName, "mongoDatabaseName"); } + public void setMongoTombstoneDatabaseName(String mongoTombstoneDatabaseName) throws SetupRelatedIndexingException { + this.mongoTombstoneDatabaseName = nonNullFieldName(mongoTombstoneDatabaseName, "mongoTombstoneDatabaseName"); + } + public void setRecordRedirectDatabaseName(String recordRedirectDatabaseName) throws SetupRelatedIndexingException { this.recordRedirectDatabaseName = nonNullFieldName(recordRedirectDatabaseName, @@ -183,13 +187,21 @@ public List getMongoHosts() throws SetupRelatedIndexingException } /** - * This method returns the Mongo database name. + * Returns the Mongo database name. * * @return The Mongo database name. - * @throws SetupRelatedIndexingException In case no Mongo database name was set. 
*/ - public String getMongoDatabaseName() throws SetupRelatedIndexingException { - return nonNullMessage(mongoDatabaseName, "Please provide a Mongo database name."); + public String getMongoDatabaseName() { + return mongoDatabaseName; + } + + /** + * Returns the Mongo tombstone database name. + * + * @return The Mongo tombstone database name. + */ + public String getMongoTombstoneDatabaseName() { + return mongoTombstoneDatabaseName; } /** diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/RecordRedirectsUtil.java b/metis-indexing/src/main/java/eu/europeana/indexing/RecordRedirectsUtil.java index 9a1cac9f18..1af3c94f5f 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/RecordRedirectsUtil.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/RecordRedirectsUtil.java @@ -37,9 +37,6 @@ /** * Utilities class to assist record redirects logic. *

    Not to be instantiated

    - * - * @author Simon Tzanakis (Simon.Tzanakis@europeana.eu) - * @since 2020-02-11 */ public final class RecordRedirectsUtil { @@ -192,10 +189,9 @@ private static String generateQueryForMatchingFields(RdfWrapper rdfWrapper, Map> thirdMapOfLists) { //Collect all required information for heuristics final List identifiers = rdfWrapper.getProviderProxyIdentifiers().stream() - .map(Identifier::getString).filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + .map(Identifier::getString).filter(StringUtils::isNotBlank).toList(); final List titles = rdfWrapper.getProviderProxyTitles().stream().map(Title::getString) - .filter(StringUtils::isNotBlank).collect(Collectors.toList()); + .filter(StringUtils::isNotBlank).toList(); final List descriptions = rdfWrapper.getProviderProxyDescriptions().stream() .map(description -> { if (StringUtils.isNotBlank(description.getString())) { @@ -205,10 +201,10 @@ private static String generateQueryForMatchingFields(RdfWrapper rdfWrapper, return description.getResource().getResource(); } return null; - }).filter(Objects::nonNull).collect(Collectors.toList()); + }).filter(Objects::nonNull).toList(); final List isShownByList = rdfWrapper.getIsShownByList().stream() .map(IsShownBy::getResource).filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + .toList(); //Create all lists that need to be combined firstMapOfLists.putAll(createFirstCombinationGroup(identifiers, titles, descriptions)); @@ -273,7 +269,7 @@ private static HashMap> createThirdCombinationGroup( private static String generateQueryForFields(Map> listsToCombine) { final List items = listsToCombine.entrySet().stream() .map(entry -> generateOrOperationFromList(entry.getKey(), entry.getValue())) - .filter(StringUtils::isNotBlank).collect(Collectors.toList()); + .filter(StringUtils::isNotBlank).toList(); return computeJoiningQuery(items, UnaryOperator.identity(), Collectors.joining(" AND ", "(", ")")); } @@ -302,7 +298,7 @@ private static String 
computeJoiningQuery(List filteredItems, } private static List getFilteredItems(List items) { - return items.stream().filter(StringUtils::isNotBlank).collect(Collectors.toList()); + return items.stream().filter(StringUtils::isNotBlank).toList(); } private static Pair> generateQueryForDatasetIds( @@ -315,7 +311,7 @@ private static Pair> generateQueryForDatasetIds( concatenatedDatasetRecordIds = filteredItems.stream().map( datasetIdForRedirection -> String.format("\"/%s/%s\"", datasetIdForRedirection, recordId)) - .collect(Collectors.toList()); + .toList(); combinedQueryForRedirectedDatasetIds = computeJoiningQuery(concatenatedDatasetRecordIds, UnaryOperator.identity(), diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/SettingsConnectionProvider.java b/metis-indexing/src/main/java/eu/europeana/indexing/SettingsConnectionProvider.java index 22e36be1ae..35378e3818 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/SettingsConnectionProvider.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/SettingsConnectionProvider.java @@ -1,6 +1,7 @@ package eu.europeana.indexing; import static eu.europeana.indexing.utils.IndexingSettingsUtils.nonNullMessage; +import static org.apache.commons.lang3.StringUtils.isNotBlank; import com.mongodb.MongoConfigurationException; import com.mongodb.MongoIncompatibleDriverException; @@ -16,15 +17,14 @@ import eu.europeana.metis.solr.connection.SolrClientProvider; import java.io.IOException; import java.util.stream.Collectors; -import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class is an implementation of {@link AbstractConnectionProvider} that sets up the connection - * using an {@link IndexingSettings} object. Various methods are made public so that this class may - * be constructed and used outside the scope of the indexing library. 
+ * This class is an implementation of {@link AbstractConnectionProvider} that sets up the connection using an + * {@link IndexingSettings} object. Various methods are made public so that this class may be constructed and used outside the + * scope of the indexing library. * * @author jochen */ @@ -37,6 +37,7 @@ public final class SettingsConnectionProvider implements AbstractConnectionProvi private final CompoundSolrClient solrClient; private final MongoClient mongoClient; private final RecordDao recordDao; + private final RecordDao tombstoneRecordDao; private final RecordRedirectDao recordRedirectDao; /** @@ -46,9 +47,10 @@ public final class SettingsConnectionProvider implements AbstractConnectionProvi * @throws SetupRelatedIndexingException In case the connections could not be set up. * @throws IndexerRelatedIndexingException In case the connection could not be established. */ - public SettingsConnectionProvider(IndexingSettings settings) throws SetupRelatedIndexingException, IndexerRelatedIndexingException { + public SettingsConnectionProvider(IndexingSettings settings) + throws SetupRelatedIndexingException, IndexerRelatedIndexingException { // Sanity check - settings = nonNullMessage(settings,"The provided settings object is null."); + settings = nonNullMessage(settings, "The provided settings object is null."); // Create Solr and Zookeeper connections. this.solrClient = new SolrClientProvider<>(settings.getSolrProperties()).createSolrClient(); @@ -56,7 +58,9 @@ public SettingsConnectionProvider(IndexingSettings settings) throws SetupRelated // Create mongo connection. try { this.mongoClient = createMongoClient(settings); - this.recordDao = setUpEdmMongoConnection(settings, this.mongoClient); + this.recordDao = setUpEdmMongoConnection(settings.getMongoDatabaseName(), this.mongoClient); + this.tombstoneRecordDao = isNotBlank(settings.getMongoTombstoneDatabaseName()) ? 
+ setUpEdmMongoConnection(settings.getMongoTombstoneDatabaseName(), this.mongoClient) : null; this.recordRedirectDao = setUpRecordRedirectDaoConnection(settings, this.mongoClient); } catch (MongoIncompatibleDriverException | MongoConfigurationException | MongoSecurityException e) { throw new SetupRelatedIndexingException(MONGO_SERVER_SETUP_ERROR, e); @@ -66,12 +70,12 @@ public SettingsConnectionProvider(IndexingSettings settings) throws SetupRelated } private static MongoClient createMongoClient(IndexingSettings settings) throws SetupRelatedIndexingException { - // Perform logging unecessary + // Perform logging unnecessary if (LOGGER.isInfoEnabled()) { LOGGER.info( "Connecting to Mongo hosts: [{}], database [{}], with{} authentication, with{} SSL. ", settings.getMongoProperties().getMongoHosts().stream().map(ServerAddress::toString) - .collect(Collectors.joining(", ")), + .collect(Collectors.joining(", ")), settings.getMongoDatabaseName(), settings.getMongoProperties().getMongoCredentials() == null ? "out" : "", settings.getMongoProperties().mongoEnableSsl() ? 
"" : "out"); @@ -81,12 +85,12 @@ private static MongoClient createMongoClient(IndexingSettings settings) throws S return new MongoClientProvider<>(settings.getMongoProperties()).createMongoClient(); } - private static RecordDao setUpEdmMongoConnection(IndexingSettings settings, MongoClient client) + private static RecordDao setUpEdmMongoConnection(String databaseName, MongoClient client) throws SetupRelatedIndexingException { try { - return new RecordDao(client, settings.getMongoDatabaseName()); + return new RecordDao(client, databaseName); } catch (RuntimeException e) { - throw new SetupRelatedIndexingException("Could not set up mongo server.", e); + throw new SetupRelatedIndexingException(MONGO_SERVER_SETUP_ERROR, e); } } @@ -94,12 +98,12 @@ private static RecordRedirectDao setUpRecordRedirectDaoConnection(IndexingSettin MongoClient client) throws SetupRelatedIndexingException { try { RecordRedirectDao recordRedirectDao = null; - if (StringUtils.isNotBlank(settings.getRecordRedirectDatabaseName())) { + if (isNotBlank(settings.getRecordRedirectDatabaseName())) { recordRedirectDao = new RecordRedirectDao(client, settings.getRecordRedirectDatabaseName()); } return recordRedirectDao; } catch (RuntimeException e) { - throw new SetupRelatedIndexingException("Could not set up mongo server.", e); + throw new SetupRelatedIndexingException(MONGO_SERVER_SETUP_ERROR, e); } } @@ -113,6 +117,11 @@ public RecordDao getRecordDao() { return recordDao; } + @Override + public RecordDao getTombstoneRecordDao() { + return tombstoneRecordDao; + } + @Override public RecordRedirectDao getRecordRedirectDao() { return recordRedirectDao; diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/FieldInputUtils.java b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/FieldInputUtils.java index 3eb7bec0cb..85e089ef07 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/FieldInputUtils.java +++ 
b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/FieldInputUtils.java @@ -15,13 +15,12 @@ import java.util.Objects; import java.util.Optional; import java.util.function.Function; -import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; /** - * Class with utility methods for converting an instance of {@link eu.europeana.metis.schema.jibx.RDF} - * to an instance of {@link eu.europeana.metis.schema.edm.beans.FullBean}. + * Class with utility methods for converting fields of {@link eu.europeana.metis.schema.jibx.RDF} + * to fields of {@link eu.europeana.corelib.definitions.edm.beans.FullBean}. */ final class FieldInputUtils { @@ -68,7 +67,7 @@ private static Map> createMapFromList(List final Stream values = Optional.ofNullable(listItem).map(valuesGetter) .filter(valueList -> !valueList.isEmpty()).stream().flatMap(Collection::stream); final List filteredValues = values.filter(Objects::nonNull).map(String::trim) - .filter(StringUtils::isNotEmpty).collect(Collectors.toList()); + .filter(StringUtils::isNotEmpty).toList(); // If there are values to add, we add them to the map. 
if (!filteredValues.isEmpty()) { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/RdfToFullBeanConverter.java b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/RdfToFullBeanConverter.java index 9bd1a07f97..0335b92b70 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/RdfToFullBeanConverter.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/RdfToFullBeanConverter.java @@ -43,7 +43,7 @@ private static Date convertToDate(String dateString) { private static List convertList(List sourceList, Function converter, boolean returnNullIfEmpty) { - final List result = sourceList.stream().map(converter).collect(Collectors.toList()); + final List result = sourceList.stream().map(converter).toList(); if (result.isEmpty() && returnNullIfEmpty) { return null; } @@ -79,32 +79,32 @@ private static List getQualityAnnotations(RdfWrapper rdfWrapp /** * Converts an RDF to Full Bean. * - * @param record The RDF record to convert. + * @param rdfWrapper The RDF record to convert. * @return The Full Bean. */ - public FullBeanImpl convertRdfToFullBean(RdfWrapper record) { + public FullBeanImpl convertRdfToFullBean(RdfWrapper rdfWrapper) { // Create full bean and set about value. final FullBeanImpl fullBean = new FullBeanImpl(); - fullBean.setAbout(record.getAbout()); + fullBean.setAbout(rdfWrapper.getAbout()); // Set list properties. 
- fullBean.setProvidedCHOs(convertList(record.getProvidedCHOs(), new ProvidedCHOFieldInput(), false)); - fullBean.setProxies(convertList(record.getProxies(), new ProxyFieldInput(), false)); - fullBean.setAggregations(convertAggregations(record)); - fullBean.setConcepts(convertList(record.getConcepts(), new ConceptFieldInput(), false)); - fullBean.setPlaces(convertList(record.getPlaces(), new PlaceFieldInput(), false)); - fullBean.setTimespans(convertList(record.getTimeSpans(), new TimespanFieldInput(), false)); - fullBean.setAgents(convertList(record.getAgents(), new AgentFieldInput(), false)); - fullBean.setOrganizations(convertList(record.getOrganizations(), new OrganizationFieldInput(), false)); - fullBean.setLicenses(convertList(record.getLicenses(), new LicenseFieldInput(), false)); - fullBean.setServices(convertList(record.getServices(), new ServiceFieldInput(), false)); - var qualityAnnotationsList = convertList(getQualityAnnotations(record), new QualityAnnotationFieldInput(), false); + fullBean.setProvidedCHOs(convertList(rdfWrapper.getProvidedCHOs(), new ProvidedCHOFieldInput(), false)); + fullBean.setProxies(convertList(rdfWrapper.getProxies(), new ProxyFieldInput(), false)); + fullBean.setAggregations(convertAggregations(rdfWrapper)); + fullBean.setConcepts(convertList(rdfWrapper.getConcepts(), new ConceptFieldInput(), false)); + fullBean.setPlaces(convertList(rdfWrapper.getPlaces(), new PlaceFieldInput(), false)); + fullBean.setTimespans(convertList(rdfWrapper.getTimeSpans(), new TimespanFieldInput(), false)); + fullBean.setAgents(convertList(rdfWrapper.getAgents(), new AgentFieldInput(), false)); + fullBean.setOrganizations(convertList(rdfWrapper.getOrganizations(), new OrganizationFieldInput(), false)); + fullBean.setLicenses(convertList(rdfWrapper.getLicenses(), new LicenseFieldInput(), false)); + fullBean.setServices(convertList(rdfWrapper.getServices(), new ServiceFieldInput(), false)); + var qualityAnnotationsList = 
convertList(getQualityAnnotations(rdfWrapper), new QualityAnnotationFieldInput(), false); fullBean.setQualityAnnotations(qualityAnnotationsList); // Set properties related to the Europeana aggregation - fullBean.setEuropeanaCollectionName(new String[]{record.getDatasetName()}); - final Optional europeanaAggregation = record + fullBean.setEuropeanaCollectionName(new String[]{rdfWrapper.getDatasetName()}); + final Optional europeanaAggregation = rdfWrapper .getEuropeanaAggregation(); fullBean.setEuropeanaAggregation( europeanaAggregation.map(new EuropeanaAggregationFieldInput()).orElse(null)); @@ -120,21 +120,21 @@ public FullBeanImpl convertRdfToFullBean(RdfWrapper record) { return fullBean; } - private List convertAggregations(RdfWrapper record) { + private List convertAggregations(RdfWrapper rdfWrapper) { //The record web resources is reduced every time one of it's web resources gets referenced //We only keep the first web resource out of duplicate web resources with the same about value - final Map recordWebResourcesMap = new WebResourcesExtractor(record) + final Map recordWebResourcesMap = new WebResourcesExtractor(rdfWrapper) .get().stream().collect(Collectors.toMap(WebResourceImpl::getAbout, Function.identity(), (existing, replacement) -> existing)); //The reference list is being extended every time a new web resource is referenced from an aggregator final Set referencedWebResourceAbouts = new HashSet<>(recordWebResourcesMap.size()); //We first convert the provider aggregations because we want this aggregator to get first get matches of web resources - final List providerAggregations = convertList(record.getProviderAggregations(), + final List providerAggregations = convertList(rdfWrapper.getProviderAggregations(), new AggregationFieldInput(recordWebResourcesMap, referencedWebResourceAbouts), false); //Convert the aggregator aggregations final List aggregatorAggregations = convertList( - record.getAggregatorAggregations(), + 
rdfWrapper.getAggregatorAggregations(), new AggregationFieldInput(recordWebResourcesMap, referencedWebResourceAbouts), false); //We choose to add leftovers on the first provider aggregation @@ -149,30 +149,29 @@ private List convertAggregations(RdfWrapper record) { //Combine aggregation lists return Stream.of(providerAggregations, aggregatorAggregations).filter(Objects::nonNull) - .flatMap(List::stream).map(Aggregation.class::cast).collect(Collectors.toList()); + .flatMap(List::stream).map(Aggregation.class::cast).toList(); } private static class WebResourcesExtractor implements Supplier> { - private final RdfWrapper record; + private final RdfWrapper rdfWrapper; private List webResources; - public WebResourcesExtractor(RdfWrapper record) { - this.record = record; + public WebResourcesExtractor(RdfWrapper rdfWrapper) { + this.rdfWrapper = rdfWrapper; } @Override public List get() { if (webResources == null) { - final Collection webResourcesBeforeConversion = record.getWebResources() - .stream().collect( + final Collection webResourcesBeforeConversion = rdfWrapper.getWebResources() + .stream().collect( Collectors.toMap(WebResourceType::getAbout, UnaryOperator.identity(), (first, second) -> first)).values(); if (webResourcesBeforeConversion.isEmpty()) { webResources = Collections.emptyList(); } else { - webResources = webResourcesBeforeConversion.stream().map(new WebResourceFieldInput()) - .collect(Collectors.toList()); + webResources = webResourcesBeforeConversion.stream().map(new WebResourceFieldInput()).toList(); } } return Collections.unmodifiableList(webResources); diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/WebResourceFieldInput.java b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/WebResourceFieldInput.java index 2f4eeca4a9..47a9a79763 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/WebResourceFieldInput.java +++ 
b/metis-indexing/src/main/java/eu/europeana/indexing/fullbean/WebResourceFieldInput.java @@ -12,6 +12,7 @@ import eu.europeana.metis.schema.jibx.ColorSpaceType; import eu.europeana.metis.schema.jibx.DoubleType; import eu.europeana.metis.schema.jibx.Duration; +import eu.europeana.metis.schema.jibx.EdmType; import eu.europeana.metis.schema.jibx.HasColorSpace; import eu.europeana.metis.schema.jibx.HasMimeType; import eu.europeana.metis.schema.jibx.HexBinaryType; @@ -20,6 +21,7 @@ import eu.europeana.metis.schema.jibx.NonNegativeIntegerType; import eu.europeana.metis.schema.jibx.OrientationType; import eu.europeana.metis.schema.jibx.Type1; +import eu.europeana.metis.schema.jibx.Type2; import eu.europeana.metis.schema.jibx.WebResourceType; import eu.europeana.metis.schema.model.MediaType; import eu.europeana.metis.schema.model.Orientation; @@ -131,16 +133,32 @@ public WebResourceImpl apply(WebResourceType wResourceType) { webResource.setEdmPreview(wResourceType.getPreview().getResource()); } - webResource.setWebResourceMetaInfo(createWebresourceMetaInfo(wResourceType)); + webResource.setWebResourceMetaInfo(createWebResourceMetaInfo(wResourceType)); return webResource; } - private WebResourceMetaInfoImpl createWebresourceMetaInfo(WebResourceType webResource) { + private WebResourceMetaInfoImpl createWebResourceMetaInfo(WebResourceType webResource) { // Get the media type and determine meta data creator - final MediaType mediaType = Optional.ofNullable(webResource.getHasMimeType()) - .map(HasMimeType::getHasMimeType).map(MediaType::getMediaType).orElse(MediaType.OTHER); + final Optional optionalHasMimeType = Optional.ofNullable(webResource.getHasMimeType()) + .map(HasMimeType::getHasMimeType); + + final MediaType mediaType = optionalHasMimeType.map(hasMimeType -> { + MediaType adaptedMediaType = MediaType.getMediaType(hasMimeType); + final EdmType edmType = Optional.ofNullable(webResource.getType1()).map(Type2::getType).orElse(null); + final boolean isOembedMimeType = 
hasMimeType.startsWith("application/xml+oembed") || hasMimeType.startsWith("application/json+oembed"); + final boolean isPossibleOembedMediaType = adaptedMediaType == MediaType.TEXT || adaptedMediaType == MediaType.OTHER; + if (isPossibleOembedMediaType && edmType != null && isOembedMimeType) { + if (edmType == EdmType.IMAGE) { + adaptedMediaType = MediaType.IMAGE; + } else if (edmType == EdmType.VIDEO) { + adaptedMediaType = MediaType.VIDEO; + } + } + + return adaptedMediaType; + }).orElse(MediaType.OTHER); final BiConsumer metaDataCreator = switch (mediaType) { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdater.java b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdater.java index 39da187488..da4c4f63b9 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdater.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdater.java @@ -29,5 +29,6 @@ protected void update(MongoPropertyUpdater propertyUpd propertyUpdater.updateArray("aggregates", EuropeanaAggregation::getAggregates); propertyUpdater.updateWebResources("webResources", EuropeanaAggregation::getWebResources, ancestorInformation, new WebResourceUpdater()); + propertyUpdater.updateObjectList("changeLog", EuropeanaAggregation::getChangeLog); } } diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoConnectionProvider.java b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoConnectionProvider.java index 2d67920013..ca0cf29821 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoConnectionProvider.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoConnectionProvider.java @@ -16,6 +16,7 @@ import eu.europeana.metis.mongo.dao.RecordRedirectDao; import java.lang.invoke.MethodHandles; import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import 
org.apache.solr.client.solrj.SolrClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,6 +33,7 @@ public final class MongoConnectionProvider implements AbstractConnectionProvider private final MongoClient mongoClient; private final RecordDao recordDao; + private RecordDao tombstoneRecordDao; private final RecordRedirectDao recordRedirectDao; /** @@ -48,6 +50,9 @@ public MongoConnectionProvider(MongoIndexingSettings settings) throws SetupRelat try { this.mongoClient = createMongoClient(settings); this.recordDao = new RecordDao(this.mongoClient, nonNullFieldName(settings.getMongoDatabaseName(), "mongoDatabaseName")); + if (StringUtils.isNotBlank(settings.getMongoTombstoneDatabaseName())) { + this.tombstoneRecordDao = new RecordDao(this.mongoClient, settings.getMongoTombstoneDatabaseName()); + } this.recordRedirectDao = new RecordRedirectDao(this.mongoClient, nonNullFieldName(settings.getRecordRedirectDatabaseName(), "recordRedirectDatabaseName")); } catch (MongoIncompatibleDriverException | MongoConfigurationException | MongoSecurityException e) { @@ -79,6 +84,11 @@ public RecordDao getRecordDao() { return recordDao; } + @Override + public RecordDao getTombstoneRecordDao() { + return tombstoneRecordDao; + } + @Override public RecordRedirectDao getRecordRedirectDao() { return recordRedirectDao; diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoIndexingSettings.java b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoIndexingSettings.java index c041cf4a8f..0f49562ae7 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoIndexingSettings.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/MongoIndexingSettings.java @@ -14,6 +14,7 @@ public class MongoIndexingSettings implements SettingsHolder { private String mongoDatabaseName; + private String mongoTombstoneDatabaseName; private String recordRedirectDatabaseName; private final MongoProperties mongoProperties; private 
IndexingProperties indexingProperties; @@ -46,6 +47,24 @@ public void setMongoDatabaseName(String mongoDatabaseName) { this.mongoDatabaseName = mongoDatabaseName; } + /** + * Gets mongo database name. + * + * @return the mongo tombstone database name + */ + public String getMongoTombstoneDatabaseName() { + return mongoTombstoneDatabaseName; + } + + /** + * Sets mongo tombstone database name. + * + * @param mongoTombstoneDatabaseName the mongo tombstone database name + */ + public void setMongoTombstoneDatabaseName(String mongoTombstoneDatabaseName) { + this.mongoTombstoneDatabaseName = mongoTombstoneDatabaseName; + } + /** * Gets record redirect database name. * diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/property/MongoPropertyUpdaterImpl.java b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/property/MongoPropertyUpdaterImpl.java index ffadd794ba..79fa8b5ee0 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/mongo/property/MongoPropertyUpdaterImpl.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/mongo/property/MongoPropertyUpdaterImpl.java @@ -59,8 +59,7 @@ class MongoPropertyUpdaterImpl implements MongoPropertyUpdater { this.current = current; this.updated = updated; this.mongoServer = mongoServer; - this.updateOperators = Optional.ofNullable(updateOperators).stream().flatMap(Collection::stream) - .collect(Collectors.toList()); + this.updateOperators = Optional.ofNullable(updateOperators).stream().flatMap(Collection::stream).collect(Collectors.toList()); this.queryCreator = queryCreator; } @@ -170,8 +169,7 @@ public void updateWebResources(String updateField, } private static List castWebResourceList(List input) { - return input == null ? null : input.stream().map(WebResourceImpl.class::cast) - .collect(Collectors.toList()); + return input == null ? 
null : input.stream().map(WebResourceImpl.class::cast).toList(); } @Override diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/EdmLabel.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/EdmLabel.java index ef0131b64e..33c7a8b271 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/EdmLabel.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/solr/EdmLabel.java @@ -13,8 +13,16 @@ public enum EdmLabel { EUROPEANA_COLLECTIONNAME("europeana_collectionName"), EUROPEANA_ID("europeana_id"), TIMESTAMP_CREATED("timestamp_created"), - TIMESTAMP_UPDATED("timestamp_update"), - + TIMESTAMP_UPDATED("timestamp_update"), + + //DATE RANGES + CREATED_DATE("created_date"), + ISSUED_DATE("issued_date"), + CREATED_DATE_BEGIN("created_date_begin"), + CREATED_DATE_END("created_date_end"), + ISSUED_DATE_BEGIN("issued_date_begin"), + ISSUED_DATE_END("issued_date_end"), + // CRF Fields FACET_HAS_THUMBNAILS("has_thumbnails"), FACET_HAS_LANDING_PAGE("has_landingpage"), diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrConnectionProvider.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrConnectionProvider.java index aad7cc5f0e..cd78a86e90 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrConnectionProvider.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrConnectionProvider.java @@ -38,6 +38,11 @@ public RecordDao getRecordDao() { return null; } + @Override + public RecordDao getTombstoneRecordDao() { + return null; + } + @Override public RecordRedirectDao getRecordRedirectDao() { return null; diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrDocumentPopulator.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrDocumentPopulator.java index 65d1b5d12f..22f5e9889f 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrDocumentPopulator.java +++ 
b/metis-indexing/src/main/java/eu/europeana/indexing/solr/SolrDocumentPopulator.java @@ -1,5 +1,14 @@ package eu.europeana.indexing.solr; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE_BEGIN; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE_END; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE_BEGIN; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE_END; +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; import static java.util.function.Predicate.not; import eu.europeana.corelib.definitions.edm.entity.QualityAnnotation; @@ -24,7 +33,15 @@ import eu.europeana.indexing.utils.RdfWrapper; import eu.europeana.indexing.utils.WebResourceLinkType; import eu.europeana.indexing.utils.WebResourceWrapper; +import eu.europeana.metis.schema.jibx.Begin; +import eu.europeana.metis.schema.jibx.End; +import eu.europeana.metis.schema.jibx.EuropeanaType.Choice; +import eu.europeana.metis.schema.jibx.ProxyType; +import eu.europeana.metis.schema.jibx.ResourceOrLiteralType; +import eu.europeana.metis.schema.jibx.ResourceOrLiteralType.Resource; +import eu.europeana.metis.schema.jibx.TimeSpanType; import eu.europeana.metis.schema.model.MediaType; +import java.time.LocalDate; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; @@ -33,6 +50,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -62,29 +80,35 @@ public void populateWithProperties(SolrInputDocument document, FullBeanImpl full new FullBeanSolrProperties().setProperties(document, fullBean); // Gather the licenses. 
- final List licenses = Optional.ofNullable(fullBean.getLicenses()).stream() - .flatMap(List::stream).filter(Objects::nonNull).collect(Collectors.toList()); + final List licenses = ofNullable(fullBean.getLicenses()).stream() + .flatMap(List::stream).filter(Objects::nonNull) + .toList(); // Gather the quality annotations. - final Set acceptableTargets = Optional.ofNullable(fullBean.getAggregations()).stream() - .flatMap(Collection::stream).filter(Objects::nonNull) - .map(AggregationImpl::getAbout) - .filter(Objects::nonNull).collect(Collectors.toSet()); - final Predicate hasAcceptableTarget = annotation -> Optional - .ofNullable(annotation.getTarget()).stream().flatMap(Arrays::stream) - .anyMatch(acceptableTargets::contains); - final List annotationsToAdd = Optional - .ofNullable(fullBean.getQualityAnnotations()).map(List::stream).orElseGet(Stream::empty) - .filter(Objects::nonNull) - .filter(annotation -> StringUtils.isNotBlank(annotation.getBody())) - .filter(hasAcceptableTarget) - .collect(Collectors.toList()); + final Set acceptableTargets = ofNullable(fullBean.getAggregations()).stream() + .flatMap(Collection::stream) + .filter(Objects::nonNull) + .map(AggregationImpl::getAbout) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + final Predicate hasAcceptableTarget = annotation -> ofNullable(annotation.getTarget()).stream().flatMap( + Arrays::stream) + .anyMatch( + acceptableTargets::contains); + final List annotationsToAdd = ofNullable(fullBean.getQualityAnnotations()).map(List::stream) + .orElseGet(Stream::empty) + .filter(Objects::nonNull) + .filter( + annotation -> StringUtils.isNotBlank( + annotation.getBody())) + .filter(hasAcceptableTarget) + .collect(Collectors.toList()); new QualityAnnotationSolrCreator().addAllToDocument(document, annotationsToAdd); // Add the containing objects. 
- new ProvidedChoSolrCreator().addToDocument(document, fullBean.getProvidedCHOs().get(0)); + new ProvidedChoSolrCreator().addToDocument(document, fullBean.getProvidedCHOs().getFirst()); new AggregationSolrCreator(licenses, fullBean.getOrganizations()) - .addToDocument(document, getDataProviderAggregations(fullBean).get(0)); + .addToDocument(document, getDataProviderAggregations(fullBean).getFirst()); new EuropeanaAggregationSolrCreator(licenses) .addToDocument(document, fullBean.getEuropeanaAggregation()); new ProxySolrCreator().addAllToDocument(document, fullBean.getProxies()); @@ -151,13 +175,67 @@ public void populateWithFacets(SolrInputDocument document, RdfWrapper rdf) { } } + /** + * Populates Solr document with the date range fields. Please note: this method should only be called once on a * given + * document, otherwise the behavior is not defined. + * + * @param document The document to populate. + * @param rdfWrapper The RDF to populate from. + */ + public void populateWithDateRanges(SolrInputDocument document, RdfWrapper rdfWrapper) { + populateWithDateRanges(document, rdfWrapper, Choice::ifCreated, Choice::getCreated, CREATED_DATE, CREATED_DATE_BEGIN, + CREATED_DATE_END); + populateWithDateRanges(document, rdfWrapper, Choice::ifIssued, Choice::getIssued, ISSUED_DATE, ISSUED_DATE_BEGIN, + ISSUED_DATE_END); + } + + private void populateWithDateRanges(SolrInputDocument document, RdfWrapper rdfWrapper, Predicate choiceTypePredicate, + Function choiceValueGetter, EdmLabel edmLabelDate, EdmLabel edmLabelDateBegin, + EdmLabel edmLabelDateEnd) { + final List normalizedTimeSpans + = rdfWrapper.getTimeSpans().stream().filter(timeSpanType -> timeSpanType.getNotation() != null).toList(); + + final ProxyType europeanaProxy = rdfWrapper.getProxies().stream().filter(RdfWrapper::isEuropeanaProxy).findFirst() + .orElseThrow(); + + final List proxyChoiceLinks = europeanaProxy.getChoiceList().stream().filter(choiceTypePredicate) + 
.map(choiceValueGetter).map(ResourceOrLiteralType::getResource) + .map(Resource::getResource).toList(); + + final List proxyChoiceMatchingTimeSpans = normalizedTimeSpans.stream().filter( + timeSpanType -> proxyChoiceLinks.contains(timeSpanType.getAbout())).toList(); + + Optional earliestBegin = empty(); + Optional latestEnd = empty(); + for (TimeSpanType timeSpanType : proxyChoiceMatchingTimeSpans) { + final String begin = ofNullable(timeSpanType.getBegin()).map(Begin::getString).orElse(null); + final String end = ofNullable(timeSpanType.getEnd()).map(End::getString).orElse(null); + // If either 'begin' or 'end' is null, set it to the value of the other + final String finalBegin = ofNullable(begin).orElse(end); + final String finalEnd = ofNullable(end).orElse(begin); + // We only need to check if finalBegin is no-null since if finalBegin is non-null then finalEnd will certainly be non-null + if (finalBegin != null) { + document.addField(edmLabelDate.toString(), String.format("[%s TO %s]", finalBegin, finalEnd)); + + final LocalDate localDateFinalBegin = LocalDate.parse(finalBegin); + final LocalDate localDateFinalEnd = LocalDate.parse(finalEnd); + + earliestBegin = earliestBegin.map(earliest -> localDateFinalBegin.isBefore(earliest) ? localDateFinalBegin : earliest) + .or(() -> of(localDateFinalBegin)); + latestEnd = latestEnd.map(latest -> localDateFinalEnd.isAfter(latest) ? 
localDateFinalEnd : latest) + .or(() -> of(localDateFinalEnd)); + } + } + earliestBegin.ifPresent(date -> document.addField(edmLabelDateBegin.toString(), date.toString())); + latestEnd.ifPresent(date -> document.addField(edmLabelDateEnd.toString(), date.toString())); + } + private List getDataProviderAggregations(FullBeanImpl fullBean) { List proxyInResult = fullBean.getProxies().stream() .filter(not(ProxyImpl::isEuropeanaProxy)) .filter(proxy -> ArrayUtils.isEmpty(proxy.getLineage())).map(ProxyImpl::getProxyIn) - .map(Arrays::asList).flatMap(List::stream).collect(Collectors.toList()); + .map(Arrays::asList).flatMap(List::stream).toList(); - return fullBean.getAggregations().stream().filter(x -> proxyInResult.contains(x.getAbout())) - .collect(Collectors.toList()); + return fullBean.getAggregations().stream().filter(x -> proxyInResult.contains(x.getAbout())).toList(); } } diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/FacetEncoder.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/FacetEncoder.java index a673bfa28d..6d0e59dde6 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/FacetEncoder.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/FacetEncoder.java @@ -158,7 +158,7 @@ private static Set getFacetSearchCodes(EncodedFacetCollection mediaType } // Filter the code lists so that empty sets or null sets are ignored. 
final List> filteredCodes = codes.stream().filter(Objects::nonNull) - .filter(set->!set.isEmpty()).collect(Collectors.toList()); + .filter(set->!set.isEmpty()).toList(); final int shiftedMediaTypeCode = getShiftedMediaTypeCode(mediaType); return SetUtils.generateForcedCombinations(filteredCodes, shiftedMediaTypeCode, (combination, code) -> combination | code); @@ -389,11 +389,11 @@ private static List> compileIntegerSets(WebResourceWrapper webResou return Collections.emptyList(); } return facets.getFacets().stream().map(facet -> facet.encodeValues(webResource)) - .filter(set -> !set.isEmpty()).collect(Collectors.toList()); + .filter(set -> !set.isEmpty()).toList(); } private static List> compileIntegerSets(FacetWithValues... values) { - return Stream.of(values).map(FacetWithValues::compileIntegerSet).collect(Collectors.toList()); + return Stream.of(values).map(FacetWithValues::compileIntegerSet).toList(); } private static List> compileAudioIntegerSets(Set mimeTypes, diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/value/ImageColorSpace.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/value/ImageColorSpace.java index e6fa7c411b..d6491fd986 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/value/ImageColorSpace.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/solr/facet/value/ImageColorSpace.java @@ -28,17 +28,11 @@ public int getCode() { * @return The category, or null if none of the categories apply. 
*/ public static ImageColorSpace categorizeImageColorSpace(final ColorSpace colorSpace) { - final ImageColorSpace result; - if (ColorSpace.COLOR == colorSpace) { - result = COLOR; - } else if (ColorSpace.GRAYSCALE == colorSpace) { - result = GRAYSCALE; - } else if (ColorSpace.OTHER == colorSpace) { - result = OTHER; - } else { - result = null; - } - return result; + return switch (colorSpace) { + case ColorSpace.COLOR -> ImageColorSpace.COLOR; + case ColorSpace.GRAYSCALE -> ImageColorSpace.GRAYSCALE; + case ColorSpace.OTHER -> ImageColorSpace.OTHER; + }; } /** diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/solr/property/AggregationSolrCreator.java b/metis-indexing/src/main/java/eu/europeana/indexing/solr/property/AggregationSolrCreator.java index 0d7d757085..00805ef0c3 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/solr/property/AggregationSolrCreator.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/solr/property/AggregationSolrCreator.java @@ -24,8 +24,6 @@ /** * Property Solr Creator for 'ore:Aggregation' tags. 
- * - * @author Yorgos.Mamakis@ europeana.eu */ public class AggregationSolrCreator implements PropertySolrCreator { @@ -44,9 +42,9 @@ public AggregationSolrCreator(List licenses, List organizations) { this.licenses = new ArrayList<>(licenses); this.organizationPrefLabelMap = organizations.stream() - .filter(org -> StringUtils.isNotBlank(org.getAbout())) - .collect(Collectors.toMap(OrganizationImpl::getAbout, - AggregationSolrCreator::findPrefLabelForOrganization, (o1, o2) -> o1)); + .filter(org -> StringUtils.isNotBlank(org.getAbout())) + .collect(Collectors.toMap(OrganizationImpl::getAbout, + AggregationSolrCreator::findPrefLabelForOrganization, (o1, o2) -> o1)); } private static Pair findPrefLabelForOrganization(OrganizationImpl organization) { @@ -60,16 +58,16 @@ private static Pair findPrefLabelForOrganization(OrganizationImp .flatMap(List::stream).filter(Objects::nonNull).findFirst() .ifPresent(value -> englishValues.add(new ImmutablePair<>("eng", value))); if (!englishValues.isEmpty()) { - return englishValues.get(0); + return englishValues.getFirst(); } // Otherwise return any value (if available). 
return Optional.ofNullable(organization.getPrefLabel()).map(Map::entrySet).stream() - .flatMap(Collection::stream) - .filter(Objects::nonNull).filter(entry -> entry.getValue() != null) - .flatMap(entry -> entry.getValue().stream().filter(StringUtils::isNotBlank) - .map(value -> new ImmutablePair<>(entry.getKey(), value))) - .findFirst().orElse(null); + .flatMap(Collection::stream) + .filter(Objects::nonNull).filter(entry -> entry.getValue() != null) + .flatMap(entry -> entry.getValue().stream().filter(StringUtils::isNotBlank) + .map(value -> new ImmutablePair<>(entry.getKey(), value))) + .findFirst().orElse(null); } @Override @@ -88,7 +86,7 @@ public void addToDocument(SolrInputDocument doc, Aggregation aggregation) { //Single value, contains provider uri(in practice the list provided has one or no value) dataProviderPair.getLeft().stream().findFirst() - .ifPresent(uri -> SolrPropertyUtils.addValue(doc, EdmLabel.DATA_PROVIDER, uri)); + .ifPresent(uri -> SolrPropertyUtils.addValue(doc, EdmLabel.DATA_PROVIDER, uri)); //Multivalued, contains provider and intermediate uris SolrPropertyUtils.addValues(doc, EdmLabel.PROVIDER, combinedProviderAndIntermediateUris); @@ -138,7 +136,7 @@ private Pair, Map>> extractUrisAndLiterals( } private void splitOrganizationUrisFromLiterals(Map> urisLiteralsMap, - Set organizationUris, Map> literalsMap) { + Set organizationUris, Map> literalsMap) { for (Map.Entry> entry : urisLiteralsMap.entrySet()) { final List literals = new ArrayList<>(); for (String value : entry.getValue()) { @@ -155,7 +153,7 @@ private void splitOrganizationUrisFromLiterals(Map> urisLit } private void addOrganizationPrefLabelsToLiterals(Set organizationUris, - Map> literalsMap) { + Map> literalsMap) { for (String organizationUri : organizationUris) { final Pair entry = organizationPrefLabelMap.get(organizationUri); if (entry != null) { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/ClassifierFactory.java 
b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/ClassifierFactory.java index af9836e08a..3212b196d1 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/ClassifierFactory.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/ClassifierFactory.java @@ -26,7 +26,7 @@ private ClassifierFactory() { * @return A classifier for the metadata tier with Provider Proxies as default mode. */ public static TierClassifier getMetadataClassifier() { - return new MetadataClassifier(new LanguageClassifier(), new EnablingElementsClassifier(), new ContextualClassesClassifier()); + return getMetadataClassifier(ClassifierMode.PROVIDER_PROXIES); } /** diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/AbstractMediaClassifier.java b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/AbstractMediaClassifier.java index 798afc0597..213852ae99 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/AbstractMediaClassifier.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/AbstractMediaClassifier.java @@ -18,7 +18,6 @@ import java.util.EnumSet; import java.util.LinkedList; import java.util.List; -import java.util.stream.Collectors; /** * This is the superclass of all classifiers for specific media types. 
Classification happens both for the entity as a whole and @@ -56,7 +55,7 @@ public final TierClassification classify(RdfWra webResources.stream().map( resource -> classifyWebResourceAndLicense(resource, entityLicenseType, hasLandingPage, hasEmbeddableMedia)) .sorted(Comparator.comparing(MediaResourceTechnicalMetadata::getMediaTier, Tier.getComparator().reversed())) - .collect(Collectors.toList()); + .toList(); //Get the highest value or else default mediaTier = descendingMediaResourceTechnicalMetadata.stream().map(MediaResourceTechnicalMetadata::getMediaTier).findFirst() .orElse(MediaTier.T0); diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/EmbeddableMedia.java b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/EmbeddableMedia.java index beaf5b9b2f..e899fb231a 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/EmbeddableMedia.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/media/EmbeddableMedia.java @@ -2,9 +2,12 @@ import eu.europeana.indexing.utils.RdfWrapper; import eu.europeana.indexing.utils.WebResourceLinkType; +import eu.europeana.metis.schema.jibx.HasMimeType; +import eu.europeana.metis.schema.jibx.WebResourceType; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; +import java.util.Objects; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -36,8 +39,8 @@ final class EmbeddableMedia { "https://api.picturepipe.net/api/html/widgets/public/playout_cloudfront?token="); private static final Collection URL_EUSCREEN = Arrays.asList( - "http://www.euscreen.eu/item.html", - "https://www.euscreen.eu/item.html*"); + "http://www.euscreen.eu/item.html", + "https://www.euscreen.eu/item.html*"); private static final Collection URL_SKETCHFAB = Arrays.asList( "https://sketchfab.com/3d-models", @@ -66,15 +69,17 @@ final class EmbeddableMedia { URL_VIMEO.stream(), URL_YOUTUBE.stream()) .reduce(Stream::concat) 
- .get() - .collect(Collectors.toList()); + .get().toList(); + + private static final String OEMBED_XML = "application/xml+oembed"; + private static final String OEMBED_JSON = "application/json+oembed"; // Create patterns from the urls, quote url, wildcards are allowed in the pattern, so we do not quote those, // and we also add a wildcard at the end of each url private static final Collection PATTERNS = URL_MATCHING_LIST.stream() .map(EmbeddableMedia::quotedRegexFromString) .map(Pattern::compile) - .collect(Collectors.toList()); + .toList(); // Quote the string but not asterisk(*) characters. Asterisk character get converted to the regex // equivalent (.*). @@ -97,7 +102,26 @@ private EmbeddableMedia() { */ static boolean hasEmbeddableMedia(RdfWrapper entity) { return entity.getUrlsOfTypes(EnumSet.of(WebResourceLinkType.IS_SHOWN_BY)).stream() - .anyMatch(EmbeddableMedia::isEmbeddableMedia); + .anyMatch(EmbeddableMedia::isEmbeddableMedia) + || isOEmbeddableMedia(entity); + } + + /** + * Is an OEmbeddable media . + * + * @param entity the entity + * @return true, if the mimetype has application/json+oembed or application/xml+oembed + */ + static boolean isOEmbeddableMedia(RdfWrapper entity) { + return entity.getWebResources() + .stream() + .filter(Objects::nonNull) + .map(WebResourceType::getHasMimeType). 
+ filter(Objects::nonNull) + .map(HasMimeType::getHasMimeType) + .filter(Objects::nonNull) + .anyMatch(value -> value.startsWith(OEMBED_XML) || value.startsWith(OEMBED_JSON)); + } private static boolean isEmbeddableMedia(String url) { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/metadata/ContextualClassesClassifier.java b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/metadata/ContextualClassesClassifier.java index 5d56045e67..1d33407e4e 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/metadata/ContextualClassesClassifier.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/metadata/ContextualClassesClassifier.java @@ -37,7 +37,7 @@ public class ContextualClassesClassifier implements TierClassifierBreakdown Function> getPredicatesFromChoice( Predicate isRightChoice, Function getPredicateFromChoice) { return proxy -> Optional.of(proxy).map(ProxyType::getChoiceList).stream().flatMap(Collection::stream).filter(Objects::nonNull) - .filter(isRightChoice) - .map(getPredicateFromChoice).filter(Objects::nonNull).collect(Collectors.toList()); + .filter(isRightChoice).map(getPredicateFromChoice).filter(Objects::nonNull).toList(); } LinkAndValueGetter getLinkAndValueGetter() { diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/model/MediaTier.java b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/model/MediaTier.java index c858ab971f..5c458b2850 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/model/MediaTier.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/model/MediaTier.java @@ -28,6 +28,11 @@ public String toString() { return stringRepresentation; } + /** + * Get the Enum representation given a string value + * @param value the string value + * @return the enum representation + */ public static MediaTier getEnum(String value){ MediaTier result = null; for(MediaTier tier: MediaTier.values()) { diff --git 
a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/view/LanguageBreakdown.java b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/view/LanguageBreakdown.java index 077ffdc80e..aea4344f87 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/tiers/view/LanguageBreakdown.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/tiers/view/LanguageBreakdown.java @@ -21,7 +21,7 @@ public class LanguageBreakdown implements TierProvider { /** * Constructor with required parameters. * - * @param qualifiedElements the qualified elementes + * @param qualifiedElements the qualified elements * @param qualifiedElementsWithoutLanguageList the qualified elements that do not contain a language * @param metadataTier the tier for the breakdown */ diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfTierUtils.java b/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfTierUtils.java index 8dd7bd6555..16c836fa76 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfTierUtils.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfTierUtils.java @@ -122,8 +122,7 @@ private static void setTierInternal(RDF rdf, Tier tier) final HasQualityAnnotation link = getQualityAnnotation(aggregatorAggregation.getAbout(), rdfTier); aggregatorAggregation.setHasQualityAnnotationList( - Stream.concat(getExistingAnnotations(link, aggregatorAggregation.getHasQualityAnnotationList()), Stream.of(link)) - .collect(Collectors.toList())); + Stream.concat(getExistingAnnotations(link, aggregatorAggregation.getHasQualityAnnotationList()), Stream.of(link)).toList()); } private static void setTierInternalEuropeana(RDF rdf, Tier tier) @@ -145,8 +144,7 @@ private static void setTierInternalEuropeana(RDF rdf, Tier tier) final HasQualityAnnotation link = getQualityAnnotation(europeanaAggregationType.getAbout(), rdfTier); europeanaAggregationType.setHasQualityAnnotationList( - Stream.concat(getExistingAnnotations(link, 
europeanaAggregationType.getHasQualityAnnotationList()), Stream.of(link)) - .collect(Collectors.toList())); + Stream.concat(getExistingAnnotations(link, europeanaAggregationType.getHasQualityAnnotationList()), Stream.of(link)).toList()); } @NotNull diff --git a/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfWrapper.java b/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfWrapper.java index 854808dc8d..a836e959cd 100644 --- a/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfWrapper.java +++ b/metis-indexing/src/main/java/eu/europeana/indexing/utils/RdfWrapper.java @@ -63,13 +63,19 @@ public RdfWrapper(RDF rdfRecord) { this.rdfRecord = rdfRecord; } - private static boolean isEuropeanaProxy(ProxyType proxy) { + /** + * Identify if a proxy is a Europeana one. + * + * @param proxy the proxy to identify + * @return true if it is a Europeana proxy, otherwise false + */ + public static boolean isEuropeanaProxy(ProxyType proxy) { return Optional.of(proxy).map(ProxyType::getEuropeanaProxy) .map(EuropeanaProxy::isEuropeanaProxy).orElse(Boolean.FALSE); } private static List getFilteredPropertyList(List propertyList) { - return getFilteredPropertyStream(propertyList).collect(Collectors.toList()); + return getFilteredPropertyStream(propertyList).toList(); } private static Stream getFilteredPropertyStream(List propertyList) { @@ -121,30 +127,26 @@ public Optional getEuropeanaAggregation() { public List getProviderProxyIdentifiers() { final List choiceList = getProviderProxiesChoices(); - return choiceList.stream().filter(Choice::ifIdentifier).map(Choice::getIdentifier) - .filter(Objects::nonNull).collect(Collectors.toList()); + return choiceList.stream().filter(Choice::ifIdentifier).map(Choice::getIdentifier).filter(Objects::nonNull).toList(); } public List getProviderProxyTitles() { final List<Choice> choiceList = getProviderProxiesChoices(); - return choiceList.stream().filter(Choice::ifTitle).map(Choice::getTitle) - 
.filter(Objects::nonNull).collect(Collectors.toList()); + return choiceList.stream().filter(Choice::ifTitle).map(Choice::getTitle).filter(Objects::nonNull).toList(); } public List<Description> getProviderProxyDescriptions() { final List<Choice> choiceList = getProviderProxiesChoices(); - return choiceList.stream().filter(Choice::ifDescription).map(Choice::getDescription) - .filter(Objects::nonNull).collect(Collectors.toList()); + return choiceList.stream().filter(Choice::ifDescription).map(Choice::getDescription).filter(Objects::nonNull).toList(); } public List<Choice> getProviderProxiesChoices() { - return getProviderProxies().stream().map(EuropeanaType::getChoiceList).filter(Objects::nonNull) - .flatMap(Collection::stream).filter(Objects::nonNull).collect(Collectors.toList()); + return getProviderProxies().stream().map(EuropeanaType::getChoiceList).filter(Objects::nonNull).flatMap(Collection::stream) + .filter(Objects::nonNull).toList(); } public List<IsShownBy> getIsShownByList() { - return getAggregations().stream().map(Aggregation::getIsShownBy).filter(Objects::nonNull) - .collect(Collectors.toList()); + return getAggregations().stream().map(Aggregation::getIsShownBy).filter(Objects::nonNull).toList(); } /** @@ -171,8 +173,7 @@ public List<ProxyType> getProxies() { * @return The list of proxies. Is not null, but could be empty. 
*/ public List<ProxyType> getProviderProxies() { - return getProxies().stream().filter(proxy -> !isEuropeanaProxy(proxy)) - .collect(Collectors.toList()); + return getProxies().stream().filter(proxy -> !isEuropeanaProxy(proxy)).toList(); } /** @@ -261,8 +262,7 @@ public List<WebResourceWrapper> getWebResourceWrappers(Set<WebResourceLinkType> types); return getFilteredPropertyStream(rdfRecord.getWebResourceList()) .filter(webResource -> webResourceUrlsWithTypes.containsKey(webResource.getAbout())).map( - webResource -> new WebResourceWrapper(webResource, - webResourceUrlsWithTypes.get(webResource.getAbout()))).collect(Collectors.toList()); + webResource -> new WebResourceWrapper(webResource, webResourceUrlsWithTypes.get(webResource.getAbout()))).toList(); } /** @@ -288,8 +288,7 @@ public List<WebResourceWrapper> getWebResourceWrappers() { final Map<String, Set<WebResourceLinkType>> webResourceUrlsWithTypes = getAllLinksForTypes( Stream.of(WebResourceLinkType.values()).collect(Collectors.toSet())); return getFilteredPropertyStream(rdfRecord.getWebResourceList()).map( - webResource -> new WebResourceWrapper(webResource, - webResourceUrlsWithTypes.get(webResource.getAbout()))).collect(Collectors.toList()); + webResource -> new WebResourceWrapper(webResource, webResourceUrlsWithTypes.get(webResource.getAbout()))).toList(); } /** @@ -304,8 +303,7 @@ public List<WebResourceType> getWebResources(Set<WebResourceLinkType> types) { final Map<String, Set<WebResourceLinkType>> webResourceUrlsWithTypes = getAllLinksForTypes( types); return getFilteredPropertyStream(rdfRecord.getWebResourceList()) - .filter(webResource -> webResourceUrlsWithTypes.containsKey(webResource.getAbout())) - .collect(Collectors.toList()); + .filter(webResource -> webResourceUrlsWithTypes.containsKey(webResource.getAbout())).toList(); } /** @@ -444,8 +442,6 @@ private List<Aggregation> getAggregations(Predicate<? 
super ProxyType> proxyType .map(ProxyIn::getResource) .collect(Collectors.toSet()); - return rdfRecord.getAggregationList().stream() - .filter(aggregation -> proxyInList.contains(aggregation.getAbout())) - .collect(Collectors.toList()); + return rdfRecord.getAggregationList().stream().filter(aggregation -> proxyInList.contains(aggregation.getAbout())).toList(); } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/ClientsConnectionProviderTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/ClientsConnectionProviderTest.java new file mode 100644 index 0000000000..8eadf378ad --- /dev/null +++ b/metis-indexing/src/test/java/eu/europeana/indexing/ClientsConnectionProviderTest.java @@ -0,0 +1,78 @@ +package eu.europeana.indexing; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import eu.europeana.metis.mongo.dao.RecordDao; +import eu.europeana.metis.mongo.dao.RecordRedirectDao; +import java.io.IOException; +import org.apache.solr.client.solrj.SolrClient; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +class ClientsConnectionProviderTest { + + @Test + void testConstructorWithAllParameters() throws IOException { + RecordDao recordDao = Mockito.mock(RecordDao.class); + RecordDao tombstoneRecordDao = Mockito.mock(RecordDao.class); + RecordRedirectDao recordRedirectDao = Mockito.mock(RecordRedirectDao.class); + try (SolrClient solrClient = Mockito.mock(SolrClient.class)) { + + ClientsConnectionProvider clientsConnectionProvider = new ClientsConnectionProvider(recordDao, tombstoneRecordDao, + recordRedirectDao, solrClient); + + assertEquals(recordDao, clientsConnectionProvider.getRecordDao()); + assertEquals(tombstoneRecordDao, clientsConnectionProvider.getTombstoneRecordDao()); + assertEquals(recordRedirectDao, 
clientsConnectionProvider.getRecordRedirectDao()); + assertEquals(solrClient, clientsConnectionProvider.getSolrClient()); + } + } + + @Test + void testConstructorWithRequiredParametersOnly() throws IOException { + RecordDao recordDao = Mockito.mock(RecordDao.class); + RecordRedirectDao recordRedirectDao = Mockito.mock(RecordRedirectDao.class); + try (SolrClient solrClient = Mockito.mock(SolrClient.class)) { + + ClientsConnectionProvider clientsConnectionProvider = new ClientsConnectionProvider(recordDao, recordRedirectDao, + solrClient); + + assertEquals(recordDao, clientsConnectionProvider.getRecordDao()); + assertNull(clientsConnectionProvider.getTombstoneRecordDao()); + assertEquals(recordRedirectDao, clientsConnectionProvider.getRecordRedirectDao()); + assertEquals(solrClient, clientsConnectionProvider.getSolrClient()); + } + } + + @Test + void testConstructorThrowsExceptionForNullRecordDao() throws IOException { + RecordDao recordDao = Mockito.mock(RecordDao.class); + RecordRedirectDao recordRedirectDao = Mockito.mock(RecordRedirectDao.class); + try (SolrClient solrClient = Mockito.mock(SolrClient.class)) { + + assertThrows(NullPointerException.class, () -> + new ClientsConnectionProvider(null, recordRedirectDao, solrClient) + ); + + assertThrows(NullPointerException.class, () -> + new ClientsConnectionProvider(recordDao, recordRedirectDao, null) + ); + } + } + + @Test + void testCloseDoesNothing() throws IOException { + RecordDao recordDao = Mockito.mock(RecordDao.class); + RecordDao tombstoneRecordDao = Mockito.mock(RecordDao.class); + RecordRedirectDao recordRedirectDao = Mockito.mock(RecordRedirectDao.class); + try (SolrClient solrClient = Mockito.mock(SolrClient.class)) { + + ClientsConnectionProvider provider = new ClientsConnectionProvider(recordDao, tombstoneRecordDao, recordRedirectDao, + solrClient); + assertDoesNotThrow(provider::close); + } + } +} diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/IndexerPreprocessorTest.java 
b/metis-indexing/src/test/java/eu/europeana/indexing/IndexerPreprocessorTest.java index bbd31f24f0..70b36ad3a5 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/IndexerPreprocessorTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/IndexerPreprocessorTest.java @@ -1,6 +1,9 @@ package eu.europeana.indexing; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import eu.europeana.indexing.base.IndexingTestUtils; import eu.europeana.indexing.exception.IndexingException; @@ -18,6 +21,9 @@ */ class IndexerPreprocessorTest { + private static final String CONTENT_TIER_URI = "http://www.europeana.eu/schemas/epf/contentTier"; + private static final String METADATA_TIER_URI = "http://www.europeana.eu/schemas/epf/metadataTier"; + /** * Preprocess record. * @@ -26,16 +32,46 @@ class IndexerPreprocessorTest { */ @Test void preprocessRecord() throws SerializationException, IndexingException { + // given final RdfConversionUtils conversionUtils = new RdfConversionUtils(); final RDF inputRdf = conversionUtils.convertStringToRdf( - IndexingTestUtils.getResourceFileContent("europeana_record_to_sample_index_rdf.xml")); + IndexingTestUtils.getResourceFileContent("europeana_record_tier_calculation_rdf.xml")); final IndexingProperties indexingProperties = new IndexingProperties(Date.from(Instant.now()), true, List.of(), true, true); + // when TierResults results = IndexerPreprocessor.preprocessRecord(inputRdf, indexingProperties); - assertEquals("4", results.getMediaTier().toString()); - assertEquals("B", results.getMetadataTier().toString()); + // then + List<String> tierProvidedData = inputRdf.getAggregationList() + .stream() + .map(provideddata -> provideddata.getHasQualityAnnotationList() + .stream() + .map(q -> q.getQualityAnnotation().getHasBody() + 
.getResource()).toList()) + .findFirst().orElse(null); + + List<String> tierEuropeanaData = inputRdf.getEuropeanaAggregationList() + .stream() + .map(eudata -> eudata.getHasQualityAnnotationList() + .stream() + .map(q -> q.getQualityAnnotation().getHasBody().getResource()) + .toList()) + .findFirst().orElse(null); + + // verify two different aggregation has different calculations + assertArrayEquals(new String[]{CONTENT_TIER_URI + "1", METADATA_TIER_URI + "A"}, tierProvidedData.toArray()); + assertArrayEquals(new String[]{CONTENT_TIER_URI + "1", METADATA_TIER_URI + "B"}, tierEuropeanaData.toArray()); + + // verify return of tier calculation + assertEquals("1", results.getMediaTier().toString()); + assertEquals("A", results.getMetadataTier().toString()); + + // verify return is equal to aggregation and not europeana aggregation + assertTrue(tierProvidedData.contains(CONTENT_TIER_URI + results.getMediaTier().toString()) && + tierProvidedData.contains(METADATA_TIER_URI + results.getMetadataTier().toString())); + assertFalse(tierEuropeanaData.contains(CONTENT_TIER_URI + results.getMediaTier().toString()) && + tierEuropeanaData.contains(METADATA_TIER_URI + results.getMetadataTier().toString())); } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/SimpleIndexerFactoryTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/SimpleIndexerFactoryTest.java index 7426567e47..42979e73e8 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/SimpleIndexerFactoryTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/SimpleIndexerFactoryTest.java @@ -1,7 +1,7 @@ package eu.europeana.indexing; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import eu.europeana.indexing.exception.SetupRelatedIndexingException; import eu.europeana.indexing.mongo.MongoIndexer; @@ -24,7 +24,7 @@ void getSolrIndexer() 
throws SetupRelatedIndexingException, URISyntaxException { solrProperties.addSolrHost(new URI("http://localhost:8983")); SolrIndexingSettings settings = new SolrIndexingSettings(solrProperties); - assertTrue(simpleIndexerFactory.getIndexer(settings) instanceof SolrIndexer); + assertInstanceOf(SolrIndexer.class, simpleIndexerFactory.getIndexer(settings)); } @Test @@ -33,10 +33,12 @@ void getMongoIndexer() throws SetupRelatedIndexingException { mongoProperties.setMongoHosts(new String[]{"localhost"},new int[]{27001}); MongoIndexingSettings settings = new MongoIndexingSettings(mongoProperties); settings.setMongoDatabaseName("recordDB"); + settings.setMongoTombstoneDatabaseName("tombstoneRecordDB"); settings.setRecordRedirectDatabaseName("recordRedirectDB"); - assertTrue(simpleIndexerFactory.getIndexer(settings) instanceof MongoIndexer); + assertInstanceOf(MongoIndexer.class, simpleIndexerFactory.getIndexer(settings)); assertEquals("recordDB", settings.getMongoDatabaseName()); + assertEquals("tombstoneRecordDB", settings.getMongoTombstoneDatabaseName()); assertEquals("recordRedirectDB", settings.getRecordRedirectDatabaseName()); } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/base/MongoDBContainerIT.java b/metis-indexing/src/test/java/eu/europeana/indexing/base/MongoDBContainerIT.java index b73575ec5b..9d521d0966 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/base/MongoDBContainerIT.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/base/MongoDBContainerIT.java @@ -40,6 +40,7 @@ public void logConfiguration() { public void dynamicProperties(DynamicPropertyRegistry registry) { registry.add("mongo.application-name", () -> "mongo-testcontainer-test"); registry.add("mongo.db", () -> "test"); + registry.add("mongo.tombstone.db", () -> "test_tombstone"); registry.add("mongo.redirect.db", () -> "test_redirect"); registry.add("mongo.hosts", mongoDBContainer::getHost); registry.add("mongo.port", 
mongoDBContainer::getFirstMappedPort); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/AgentFieldInputTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/AgentFieldInputTest.java index 9158698923..6d75f300d5 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/AgentFieldInputTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/AgentFieldInputTest.java @@ -9,6 +9,8 @@ import dev.morphia.Datastore; import dev.morphia.query.Query; import dev.morphia.query.filters.Filters; +import eu.europeana.corelib.solr.entity.AgentImpl; +import eu.europeana.metis.mongo.dao.RecordDao; import eu.europeana.metis.schema.jibx.AgentType; import eu.europeana.metis.schema.jibx.AltLabel; import eu.europeana.metis.schema.jibx.Begin; @@ -16,17 +18,12 @@ import eu.europeana.metis.schema.jibx.LiteralType.Lang; import eu.europeana.metis.schema.jibx.Note; import eu.europeana.metis.schema.jibx.PrefLabel; -import eu.europeana.metis.mongo.dao.RecordDao; -import eu.europeana.corelib.solr.entity.AgentImpl; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Test; /** * Unit test for the Agent field input creator - * - * @author Yorgos.Mamakis@ kb.nl - * */ class AgentFieldInputTest { @@ -83,17 +80,12 @@ void testAgent() { mongoServerMock.getDatastore().save(agent); assertEquals(agentType.getAbout(), agent.getAbout()); assertEquals(agentType.getBegin().getString(), - agent.getBegin().values().iterator().next().get(0)); - assertEquals(agentType.getEnd().getString(), agent.getEnd().values().iterator().next().get(0)); - assertEquals(agentType.getNoteList().get(0).getString(), - agent.getNote().values().iterator().next().get(0)); - assertTrue( - agent.getAltLabel().containsKey(agentType.getAltLabelList().get(0).getLang().getLang())); - assertTrue( - agent.getPrefLabel().containsKey(agentType.getPrefLabelList().get(0).getLang().getLang())); - 
assertEquals(agentType.getAltLabelList().get(0).getString(), - agent.getAltLabel().values().iterator().next().get(0)); - assertEquals(agentType.getPrefLabelList().get(0).getString(), - agent.getPrefLabel().values().iterator().next().get(0)); + agent.getBegin().values().iterator().next().getFirst()); + assertEquals(agentType.getEnd().getString(), agent.getEnd().values().iterator().next().getFirst()); + assertEquals(agentType.getNoteList().getFirst().getString(), agent.getNote().values().iterator().next().getFirst()); + assertTrue(agent.getAltLabel().containsKey(agentType.getAltLabelList().getFirst().getLang().getLang())); + assertTrue(agent.getPrefLabel().containsKey(agentType.getPrefLabelList().getFirst().getLang().getLang())); + assertEquals(agentType.getAltLabelList().getFirst().getString(), agent.getAltLabel().values().iterator().next().getFirst()); + assertEquals(agentType.getPrefLabelList().getFirst().getString(), agent.getPrefLabel().values().iterator().next().getFirst()); } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ConceptFieldInputTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ConceptFieldInputTest.java index 5fa6542739..328ac07869 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ConceptFieldInputTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ConceptFieldInputTest.java @@ -22,8 +22,6 @@ /** * Unit tests for Concepts field input creator - * - * @author Yorgos.Mamakis@ kb.nl */ class ConceptFieldInputTest { @@ -77,20 +75,15 @@ void testConcept() { assertEquals(concept.getAbout(), conceptMongo.getAbout()); for (Concept.Choice choice2 : concept.getChoiceList()) { if (choice2.ifNote()) { - assertEquals(choice2.getNote().getString(), - conceptMongo.getNote().values().iterator().next().get(0)); + assertEquals(choice2.getNote().getString(), conceptMongo.getNote().values().iterator().next().getFirst()); } if (choice2.ifAltLabel()) { - assertTrue( - 
conceptMongo.getAltLabel().containsKey(choice2.getAltLabel().getLang().getLang())); - assertEquals(choice2.getAltLabel().getString(), - conceptMongo.getAltLabel().values().iterator().next().get(0)); + assertTrue(conceptMongo.getAltLabel().containsKey(choice2.getAltLabel().getLang().getLang())); + assertEquals(choice2.getAltLabel().getString(), conceptMongo.getAltLabel().values().iterator().next().getFirst()); } if (choice2.ifPrefLabel()) { - assertTrue( - conceptMongo.getPrefLabel().containsKey(choice2.getPrefLabel().getLang().getLang())); - assertEquals(choice2.getPrefLabel().getString(), - conceptMongo.getPrefLabel().values().iterator().next().get(0)); + assertTrue(conceptMongo.getPrefLabel().containsKey(choice2.getPrefLabel().getLang().getLang())); + assertEquals(choice2.getPrefLabel().getString(), conceptMongo.getPrefLabel().values().iterator().next().getFirst()); } } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/FieldInputUtilsTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/FieldInputUtilsTest.java index 0eb0059fa6..ad5bf0d2d4 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/FieldInputUtilsTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/FieldInputUtilsTest.java @@ -19,9 +19,6 @@ /** * FieldInputUtils unit tests - * - * @author Yorgos.Mamakis@ kb.nl - * */ class FieldInputUtilsTest { @@ -36,7 +33,7 @@ void testCreateLiteralMapFromObject() { assertNotNull(testMap); assertEquals(1, testMap.size()); assertEquals("en", testMap.keySet().iterator().next()); - assertEquals("str", testMap.get("en").get(0)); + assertEquals("str", testMap.get("en").getFirst()); assertNull(FieldInputUtils.createLiteralMapFromString(null)); assertNull(FieldInputUtils.createLiteralMapFromString(new LiteralType())); @@ -48,7 +45,7 @@ void testCreateLiteralMapFromString() { assertNotNull(testMap); assertEquals(1, testMap.size()); assertEquals("def", testMap.keySet().iterator().next()); - 
assertEquals("str", testMap.get("def").get(0)); + assertEquals("str", testMap.get("def").getFirst()); assertNull(FieldInputUtils.createMapFromString(null)); assertNull(FieldInputUtils.createMapFromString(" ")); @@ -66,7 +63,7 @@ void testCreateResourceOrLiteralMapFromString() { assertNotNull(testMap); assertEquals(1, testMap.size()); assertEquals("en", testMap.keySet().iterator().next()); - assertEquals("str", testMap.get("en").get(0)); + assertEquals("str", testMap.get("en").getFirst()); ResourceOrLiteralType obj2 = new ResourceOrLiteralType(); Resource res = new Resource(); @@ -76,7 +73,7 @@ void testCreateResourceOrLiteralMapFromString() { assertNotNull(testMap2); assertEquals(1, testMap2.size()); assertEquals("def", testMap2.keySet().iterator().next()); - assertEquals("str", testMap2.get("def").get(0)); + assertEquals("str", testMap2.get("def").getFirst()); ResourceOrLiteralType obj3 = new ResourceOrLiteralType(); ResourceOrLiteralType.Lang lang3 = new ResourceOrLiteralType.Lang(); @@ -134,8 +131,8 @@ void testCreateLiteralMapFromList() { assertEquals(2, mapB.size()); assertTrue(mapB.containsKey("def")); assertTrue(mapB.containsKey("en")); - assertEquals("strC", mapB.get("en").get(0)); - assertEquals("strD", mapB.get("def").get(0)); + assertEquals("strC", mapB.get("en").getFirst()); + assertEquals("strD", mapB.get("def").getFirst()); ltE.setString("strE"); ltF.setString("strF"); @@ -190,8 +187,8 @@ void testCreateResourceOrLiteralMapFromList() { assertEquals(2, mapB.size()); assertTrue(mapB.containsKey("def")); assertTrue(mapB.containsKey("en")); - assertEquals("strC", mapB.get("en").get(0)); - assertEquals("strD", mapB.get("def").get(0)); + assertEquals("strC", mapB.get("en").getFirst()); + assertEquals("strD", mapB.get("def").getFirst()); ltE.setString("strE"); ltF.setString("strF"); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/PlaceFieldInputTest.java 
b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/PlaceFieldInputTest.java index 65ea7e4f8a..80fe540489 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/PlaceFieldInputTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/PlaceFieldInputTest.java @@ -84,17 +84,17 @@ void testPlace() { PlaceImpl placeMongo = new PlaceFieldInput().apply(place); mongoServerMock.getDatastore().save(placeMongo); assertEquals(place.getAbout(), placeMongo.getAbout()); - assertEquals(place.getNoteList().get(0).getString(), - placeMongo.getNote().values().iterator().next().get(0)); + assertEquals(place.getNoteList().getFirst().getString(), + placeMongo.getNote().values().iterator().next().getFirst()); assertTrue( - placeMongo.getAltLabel().containsKey(place.getAltLabelList().get(0).getLang().getLang())); - assertEquals(place.getAltLabelList().get(0).getString(), - placeMongo.getAltLabel().values().iterator().next().get(0)); + placeMongo.getAltLabel().containsKey(place.getAltLabelList().getFirst().getLang().getLang())); + assertEquals(place.getAltLabelList().getFirst().getString(), + placeMongo.getAltLabel().values().iterator().next().getFirst()); - assertEquals(place.getPrefLabelList().get(0).getString(), - placeMongo.getPrefLabel().values().iterator().next().get(0)); - assertEquals(place.getIsPartOfList().get(0).getString(), - placeMongo.getIsPartOf().values().iterator().next().get(0)); + assertEquals(place.getPrefLabelList().getFirst().getString(), + placeMongo.getPrefLabel().values().iterator().next().getFirst()); + assertEquals(place.getIsPartOfList().getFirst().getString(), + placeMongo.getIsPartOf().values().iterator().next().getFirst()); assertEquals(Float.toString(place.getLat().getLat()), Float.toString(placeMongo.getLatitude())); assertEquals(Float.toString(place.getLong().getLong()), Float.toString(placeMongo.getLongitude())); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ProxyFieldInputTest.java 
b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ProxyFieldInputTest.java index e0a4a80b88..0005d74944 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ProxyFieldInputTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/ProxyFieldInputTest.java @@ -91,151 +91,151 @@ private void testMongo(ProxyType proxy) { for (EuropeanaType.Choice choice : dcterms) { if (choice.ifAlternative()) { assertEquals(choice.getAlternative().getString(), - mongoProxy.getDctermsAlternative().values().iterator().next().get(0)); + mongoProxy.getDctermsAlternative().values().iterator().next().getFirst()); } if (choice.ifConformsTo()) { assertEquals(choice.getConformsTo().getResource().getResource(), - mongoProxy.getDctermsConformsTo().values().iterator().next().get(0)); + mongoProxy.getDctermsConformsTo().values().iterator().next().getFirst()); } if (choice.ifCreated()) { assertEquals(choice.getCreated().getResource().getResource(), - mongoProxy.getDctermsCreated().values().iterator().next().get(0)); + mongoProxy.getDctermsCreated().values().iterator().next().getFirst()); } if (choice.ifExtent()) { assertEquals(choice.getExtent().getResource().getResource(), - mongoProxy.getDctermsExtent().values().iterator().next().get(0)); + mongoProxy.getDctermsExtent().values().iterator().next().getFirst()); } if (choice.ifHasFormat()) { assertEquals(choice.getHasFormat().getResource().getResource(), - mongoProxy.getDctermsHasFormat().values().iterator().next().get(0)); + mongoProxy.getDctermsHasFormat().values().iterator().next().getFirst()); } if (choice.ifHasPart()) { assertEquals(choice.getHasPart().getResource().getResource(), - mongoProxy.getDctermsHasPart().values().iterator().next().get(0)); + mongoProxy.getDctermsHasPart().values().iterator().next().getFirst()); } if (choice.ifHasVersion()) { assertEquals(choice.getHasVersion().getResource().getResource(), - mongoProxy.getDctermsHasVersion().values().iterator().next().get(0)); + 
mongoProxy.getDctermsHasVersion().values().iterator().next().getFirst()); } if (choice.ifIsFormatOf()) { assertEquals(choice.getIsFormatOf().getResource().getResource(), - mongoProxy.getDctermsIsFormatOf().values().iterator().next().get(0)); + mongoProxy.getDctermsIsFormatOf().values().iterator().next().getFirst()); } if (choice.ifIsPartOf()) { assertEquals(choice.getIsPartOf().getResource().getResource(), - mongoProxy.getDctermsIsPartOf().values().iterator().next().get(0)); + mongoProxy.getDctermsIsPartOf().values().iterator().next().getFirst()); } if (choice.ifIsReferencedBy()) { assertEquals(choice.getIsReferencedBy().getResource().getResource(), - mongoProxy.getDctermsIsReferencedBy().values().iterator().next().get(0)); + mongoProxy.getDctermsIsReferencedBy().values().iterator().next().getFirst()); } if (choice.ifIsReplacedBy()) { assertEquals(choice.getIsReplacedBy().getResource().getResource(), - mongoProxy.getDctermsIsReplacedBy().values().iterator().next().get(0)); + mongoProxy.getDctermsIsReplacedBy().values().iterator().next().getFirst()); } if (choice.ifIsRequiredBy()) { assertEquals(choice.getIsRequiredBy().getResource().getResource(), - mongoProxy.getDctermsIsRequiredBy().values().iterator().next().get(0)); + mongoProxy.getDctermsIsRequiredBy().values().iterator().next().getFirst()); } if (choice.ifIssued()) { assertEquals(choice.getIssued().getResource().getResource(), - mongoProxy.getDctermsIssued().values().iterator().next().get(0)); + mongoProxy.getDctermsIssued().values().iterator().next().getFirst()); } if (choice.ifIsVersionOf()) { assertEquals(choice.getIsVersionOf().getResource().getResource(), - mongoProxy.getDctermsIsVersionOf().values().iterator().next().get(0)); + mongoProxy.getDctermsIsVersionOf().values().iterator().next().getFirst()); } if (choice.ifMedium()) { assertEquals(choice.getMedium().getResource().getResource(), - mongoProxy.getDctermsMedium().values().iterator().next().get(0)); + 
mongoProxy.getDctermsMedium().values().iterator().next().getFirst()); } if (choice.ifProvenance()) { assertEquals(choice.getProvenance().getResource().getResource(), - mongoProxy.getDctermsProvenance().values().iterator().next().get(0)); + mongoProxy.getDctermsProvenance().values().iterator().next().getFirst()); } if (choice.ifReferences()) { assertEquals(choice.getReferences().getResource().getResource(), - mongoProxy.getDctermsReferences().values().iterator().next().get(0)); + mongoProxy.getDctermsReferences().values().iterator().next().getFirst()); } if (choice.ifReplaces()) { assertEquals(choice.getReplaces().getResource().getResource(), - mongoProxy.getDctermsReplaces().values().iterator().next().get(0)); + mongoProxy.getDctermsReplaces().values().iterator().next().getFirst()); } if (choice.ifRequires()) { assertEquals(choice.getRequires().getResource().getResource(), - mongoProxy.getDctermsRequires().values().iterator().next().get(0)); + mongoProxy.getDctermsRequires().values().iterator().next().getFirst()); } if (choice.ifSpatial()) { assertEquals(choice.getSpatial().getResource().getResource(), - mongoProxy.getDctermsSpatial().values().iterator().next().get(0)); + mongoProxy.getDctermsSpatial().values().iterator().next().getFirst()); } if (choice.ifTableOfContents()) { assertEquals(choice.getTableOfContents().getResource().getResource(), - mongoProxy.getDctermsTOC().values().iterator().next().get(0)); + mongoProxy.getDctermsTOC().values().iterator().next().getFirst()); } if (choice.ifTemporal()) { assertEquals(choice.getTemporal().getResource().getResource(), - mongoProxy.getDctermsTemporal().values().iterator().next().get(0)); + mongoProxy.getDctermsTemporal().values().iterator().next().getFirst()); } if (choice.ifContributor()) { assertEquals(choice.getContributor().getResource().getResource(), - mongoProxy.getDcContributor().values().iterator().next().get(0)); + mongoProxy.getDcContributor().values().iterator().next().getFirst()); } if 
(choice.ifCoverage()) { assertEquals(choice.getCoverage().getResource().getResource(), - mongoProxy.getDcCoverage().values().iterator().next().get(0)); + mongoProxy.getDcCoverage().values().iterator().next().getFirst()); } if (choice.ifCreator()) { assertEquals(choice.getCreator().getResource().getResource(), - mongoProxy.getDcCreator().values().iterator().next().get(0)); + mongoProxy.getDcCreator().values().iterator().next().getFirst()); } if (choice.ifDate()) { assertEquals(choice.getDate().getResource().getResource(), - mongoProxy.getDcDate().values().iterator().next().get(0)); + mongoProxy.getDcDate().values().iterator().next().getFirst()); } if (choice.ifDescription()) { assertEquals(choice.getDescription().getResource().getResource(), - mongoProxy.getDcDescription().values().iterator().next().get(0)); + mongoProxy.getDcDescription().values().iterator().next().getFirst()); } if (choice.ifFormat()) { assertEquals(choice.getFormat().getResource().getResource(), - mongoProxy.getDcFormat().values().iterator().next().get(0)); + mongoProxy.getDcFormat().values().iterator().next().getFirst()); } if (choice.ifIdentifier()) { assertEquals(choice.getIdentifier().getString(), - mongoProxy.getDcIdentifier().values().iterator().next().get(0)); + mongoProxy.getDcIdentifier().values().iterator().next().getFirst()); } if (choice.ifLanguage()) { assertEquals(choice.getLanguage().getString(), - mongoProxy.getDcLanguage().values().iterator().next().get(0)); + mongoProxy.getDcLanguage().values().iterator().next().getFirst()); } if (choice.ifPublisher()) { assertEquals(choice.getPublisher().getResource().getResource(), - mongoProxy.getDcPublisher().values().iterator().next().get(0)); + mongoProxy.getDcPublisher().values().iterator().next().getFirst()); } if (choice.ifRelation()) { assertEquals(choice.getRelation().getResource().getResource(), - mongoProxy.getDcRelation().values().iterator().next().get(0)); + mongoProxy.getDcRelation().values().iterator().next().getFirst()); } if 
(choice.ifRights()) { assertEquals(choice.getRights().getResource().getResource(), - mongoProxy.getDcRights().values().iterator().next().get(0)); + mongoProxy.getDcRights().values().iterator().next().getFirst()); } if (choice.ifSource()) { assertEquals(choice.getSource().getResource().getResource(), - mongoProxy.getDcSource().values().iterator().next().get(0)); + mongoProxy.getDcSource().values().iterator().next().getFirst()); } if (choice.ifSubject()) { assertEquals(choice.getSubject().getResource().getResource(), - mongoProxy.getDcSubject().values().iterator().next().get(0)); + mongoProxy.getDcSubject().values().iterator().next().getFirst()); } if (choice.ifTitle()) { assertEquals(choice.getTitle().getString(), - mongoProxy.getDcTitle().values().iterator().next().get(0)); + mongoProxy.getDcTitle().values().iterator().next().getFirst()); } if (choice.ifType()) { assertEquals(choice.getType().getResource().getResource(), - mongoProxy.getDcType().values().iterator().next().get(0)); + mongoProxy.getDcType().values().iterator().next().getFirst()); } } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/TimespanFieldInputTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/TimespanFieldInputTest.java index 079993b9cd..65d987713a 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/TimespanFieldInputTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/TimespanFieldInputTest.java @@ -57,25 +57,25 @@ void testTimespan() { private static void assertTimespanFieldInput(TimeSpanType timespan, TimespanImpl timespanMongo) { assertEquals(timespan.getAbout(), timespanMongo.getAbout()); assertEquals(timespan.getBegin().getString(), - timespanMongo.getBegin().values().iterator().next().get(0)); + timespanMongo.getBegin().values().iterator().next().getFirst()); assertEquals(timespan.getEnd().getString(), - timespanMongo.getEnd().values().iterator().next().get(0)); - 
assertEquals(timespan.getNoteList().get(0).getString(), - timespanMongo.getNote().values().iterator().next().get(0)); + timespanMongo.getEnd().values().iterator().next().getFirst()); + assertEquals(timespan.getNoteList().getFirst().getString(), + timespanMongo.getNote().values().iterator().next().getFirst()); assertTrue(timespanMongo.getAltLabel() - .containsKey(timespan.getAltLabelList().get(0).getLang().getLang())); + .containsKey(timespan.getAltLabelList().getFirst().getLang().getLang())); assertTrue(timespanMongo.getPrefLabel() - .containsKey(timespan.getPrefLabelList().get(0).getLang().getLang())); + .containsKey(timespan.getPrefLabelList().getFirst().getLang().getLang())); assertTrue(timespanMongo.getHiddenLabel() - .containsKey(timespan.getHiddenLabelList().get(0).getLang().getLang())); - assertEquals(timespan.getAltLabelList().get(0).getString(), - timespanMongo.getAltLabel().values().iterator().next().get(0)); - assertEquals(timespan.getPrefLabelList().get(0).getString(), - timespanMongo.getPrefLabel().values().iterator().next().get(0)); - assertEquals(timespan.getIsPartOfList().get(0).getResource().getResource(), - timespanMongo.getIsPartOf().values().iterator().next().get(0)); + .containsKey(timespan.getHiddenLabelList().getFirst().getLang().getLang())); + assertEquals(timespan.getAltLabelList().getFirst().getString(), + timespanMongo.getAltLabel().values().iterator().next().getFirst()); + assertEquals(timespan.getPrefLabelList().getFirst().getString(), + timespanMongo.getPrefLabel().values().iterator().next().getFirst()); + assertEquals(timespan.getIsPartOfList().getFirst().getResource().getResource(), + timespanMongo.getIsPartOf().values().iterator().next().getFirst()); assertEquals(timespan.getNotation().getString(), - timespanMongo.getSkosNotation().values().iterator().next().get(0)); + timespanMongo.getSkosNotation().values().iterator().next().getFirst()); } private static TimeSpanType getTimeSpanType() { diff --git 
a/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/WebResourceFieldInputTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/WebResourceFieldInputTest.java new file mode 100644 index 0000000000..4eea72284c --- /dev/null +++ b/metis-indexing/src/test/java/eu/europeana/indexing/fullbean/WebResourceFieldInputTest.java @@ -0,0 +1,219 @@ +package eu.europeana.indexing.fullbean; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import eu.europeana.corelib.solr.entity.WebResourceImpl; +import eu.europeana.metis.schema.jibx.BitRate; +import eu.europeana.metis.schema.jibx.CodecName; +import eu.europeana.metis.schema.jibx.ColorSpaceType; +import eu.europeana.metis.schema.jibx.DoubleType; +import eu.europeana.metis.schema.jibx.Duration; +import eu.europeana.metis.schema.jibx.EdmType; +import eu.europeana.metis.schema.jibx.HasColorSpace; +import eu.europeana.metis.schema.jibx.HasMimeType; +import eu.europeana.metis.schema.jibx.Height; +import eu.europeana.metis.schema.jibx.HexBinaryType; +import eu.europeana.metis.schema.jibx.LongType; +import eu.europeana.metis.schema.jibx.OrientationType; +import eu.europeana.metis.schema.jibx.Type2; +import eu.europeana.metis.schema.jibx.WebResourceType; +import eu.europeana.metis.schema.jibx.Width; +import eu.europeana.metis.schema.model.Orientation; +import java.math.BigInteger; +import java.util.List; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.Test; + +class WebResourceFieldInputTest { + + @Test + void testImageWebResource() { + final WebResourceType webResourceType = getWebResourceImage(); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertImageWebResourceImpl(webResourceType, webResourceImpl); + } + + @Test + void 
testOEmbedJsonImageWebResource() { + final WebResourceType webResourceType = getWebResourceImage(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/json+oembed"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertImageWebResourceImpl(webResourceType, webResourceImpl); + } + + @Test + void testOEmbedXmlImageWebResource() { + final WebResourceType webResourceType = getWebResourceImage(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/xml+oembed"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertImageWebResourceImpl(webResourceType, webResourceImpl); + } + + @Test + void testOEmbedJsonImageWebResource_Null_WebResourceMetaInfo() { + final WebResourceType webResourceType = getWebResourceImage(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/json"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertNull(webResourceImpl.getWebResourceMetaInfo()); + } + + @Test + void testOEmbedXmlImageWebResource_NotNull_WebResourceMetaInfo() { + final WebResourceType webResourceType = getWebResourceImage(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/xml"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertNotNull(webResourceImpl.getWebResourceMetaInfo().getTextMetaInfo()); + } + + @Test + void testVideoWebResource() { + final WebResourceType webResourceType = getWebResourceVideo(); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertVideoWebResourceImpl(webResourceType, 
webResourceImpl); + } + + @Test + void testOEmbedJsonVideoVideoWebResource() { + final WebResourceType webResourceType = getWebResourceVideo(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/json+oembed"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertVideoWebResourceImpl(webResourceType, webResourceImpl); + } + + @Test + void testOEmbedXmlVideoVideoWebResource() { + final WebResourceType webResourceType = getWebResourceVideo(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/xml+oembed"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertVideoWebResourceImpl(webResourceType, webResourceImpl); + } + + @Test + void testOEmbedJsonVideoWebResource_Null_WebResourceMetaInfo() { + final WebResourceType webResourceType = getWebResourceVideo(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/json"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertNull(webResourceImpl.getWebResourceMetaInfo()); + } + + @Test + void testOEmbedXmlVideoWebResource_NotNull_WebResourceMetaInfo() { + final WebResourceType webResourceType = getWebResourceVideo(); + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("application/xml"); + webResourceType.setHasMimeType(hasMimeType); + WebResourceImpl webResourceImpl = new WebResourceFieldInput().apply(webResourceType); + + assertNotNull(webResourceImpl.getWebResourceMetaInfo().getTextMetaInfo()); + } + + private static void assertImageWebResourceImpl(WebResourceType webResourceType, WebResourceImpl webResourceImpl) { + assertEquals(webResourceType.getHasMimeType().getHasMimeType(), 
webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getMimeType()); + assertEquals(webResourceType.getFileByteSize().getLong(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getFileSize()); + assertEquals(Long.valueOf(webResourceType.getHeight().getLong()).intValue(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getHeight()); + assertEquals(Long.valueOf(webResourceType.getWidth().getLong()).intValue(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getWidth()); + assertEquals(webResourceType.getHasColorSpace().getHasColorSpace().xmlValue(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getColorSpace()); + assertEquals(webResourceType.getOrientation().getString(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getOrientation().name()); + assertArrayEquals(webResourceType.getComponentColorList().stream().map(HexBinaryType::getString).toList().toArray(), webResourceImpl.getWebResourceMetaInfo().getImageMetaInfo().getColorPalette()); + } + + private static void assertVideoWebResourceImpl(WebResourceType webResourceType, WebResourceImpl webResourceImpl) { + assertEquals(webResourceType.getHasMimeType().getHasMimeType(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getMimeType()); + assertEquals(webResourceType.getFileByteSize().getLong(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getFileSize()); + assertEquals(Long.valueOf(webResourceType.getHeight().getLong()).intValue(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getHeight()); + assertEquals(Long.valueOf(webResourceType.getWidth().getLong()).intValue(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getWidth()); + assertEquals(webResourceType.getBitRate().getInteger().intValue(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getBitRate()); + assertEquals(webResourceType.getCodecName().getCodecName(), 
webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getCodec()); + assertEquals(webResourceType.getFrameRate().getDouble(), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getFrameRate()); + assertEquals(Long.valueOf(webResourceType.getDuration().getDuration()), webResourceImpl.getWebResourceMetaInfo().getVideoMetaInfo().getDuration()); + } + + private static @NotNull WebResourceType getWebResourceImage() { + WebResourceType webResourceType = new WebResourceType(); + final Type2 type2 = new Type2(); + type2.setType(EdmType.IMAGE); + webResourceType.setType1(type2); + final LongType longType = new LongType(); + longType.setLong(1000L); + webResourceType.setFileByteSize(longType); + final Height height = new Height(); + height.setLong(500L); + webResourceType.setHeight(height); + final Width width = new Width(); + width.setLong(500L); + webResourceType.setWidth(width); + final HasColorSpace hasColorSpace = new HasColorSpace(); + hasColorSpace.setHasColorSpace(ColorSpaceType.CMY); + webResourceType.setHasColorSpace(hasColorSpace); + final OrientationType orientationType = new OrientationType(); + orientationType.setString(Orientation.LANDSCAPE.name()); + webResourceType.setOrientation(orientationType); + final HexBinaryType hexBinaryType = new HexBinaryType(); + hexBinaryType.setString("#8FBC8F"); + webResourceType.setComponentColorList(List.of(hexBinaryType)); + + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("image/jpeg"); + webResourceType.setHasMimeType(hasMimeType); + return webResourceType; + } + + private static @NotNull WebResourceType getWebResourceVideo() { + WebResourceType webResourceType = new WebResourceType(); + final Type2 type2 = new Type2(); + type2.setType(EdmType.VIDEO); + webResourceType.setType1(type2); + final LongType longType = new LongType(); + longType.setLong(1000L); + webResourceType.setFileByteSize(longType); + final Height height = new Height(); + height.setLong(500L); + 
webResourceType.setHeight(height); + final Width width = new Width(); + width.setLong(500L); + webResourceType.setWidth(width); + final BitRate bitRate = new BitRate(); + bitRate.setInteger(new BigInteger("1000")); + webResourceType.setBitRate(bitRate); + final CodecName codecName = new CodecName(); + codecName.setCodecName("codec"); + webResourceType.setCodecName(codecName); + final DoubleType doubleType = new DoubleType(); + doubleType.setDouble((double) 1000L); + webResourceType.setFrameRate(doubleType); + final Duration duration = new Duration(); + duration.setDuration("1000"); + webResourceType.setDuration(duration); + + final HasMimeType hasMimeType = new HasMimeType(); + hasMimeType.setHasMimeType("video/mpeg"); + webResourceType.setHasMimeType(hasMimeType); + return webResourceType; + } +} diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdaterTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdaterTest.java index adbbba4dbc..3c5ab9d14c 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdaterTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/EuropeanaAggregationUpdaterTest.java @@ -1,14 +1,22 @@ package eu.europeana.indexing.mongo; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import eu.europeana.corelib.definitions.edm.entity.EuropeanaAggregation; import eu.europeana.corelib.solr.entity.EuropeanaAggregationImpl; import eu.europeana.indexing.mongo.property.MongoPropertyUpdater; import eu.europeana.indexing.mongo.property.RootAboutWrapper; +import java.util.ArrayList; +import java.util.List; +import 
java.util.function.Function; import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; class EuropeanaAggregationUpdaterTest extends MongoEntityUpdaterTest<EuropeanaAggregationImpl> { @@ -49,6 +57,15 @@ void testUpdate() { testWebResourcesPropertyUpdate(propertyUpdater, "webResources", EuropeanaAggregationImpl::setWebResources, rootAbout); + final EuropeanaAggregationImpl europeanaAggregation = new EuropeanaAggregationImpl(); + europeanaAggregation.setChangeLog(new ArrayList<>()); + @SuppressWarnings("unchecked") + final ArgumentCaptor<Function<EuropeanaAggregationImpl, List<Object>>> getterCaptor = ArgumentCaptor + .forClass(Function.class); + verify(propertyUpdater, times(1)) + .updateObjectList(eq("changeLog"), getterCaptor.capture()); + assertSame(europeanaAggregation.getChangeLog(), getterCaptor.getValue().apply(europeanaAggregation)); + // And that should be it. verifyNoMoreInteractions(propertyUpdater); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoEntityUpdaterTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoEntityUpdaterTest.java index 88623d3c3c..3202a866d5 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoEntityUpdaterTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoEntityUpdaterTest.java @@ -22,7 +22,6 @@ import java.util.function.Function; import java.util.function.Supplier; import java.util.function.UnaryOperator; -import java.util.stream.Collectors; import org.mockito.ArgumentCaptor; abstract class MongoEntityUpdaterTest<T> { @@ -126,8 +125,7 @@ void testWebResourcesPropertyUpdate(MongoPropertyUpdater<T> propertyUpdater, Str verify(propertyUpdater, times(1)) .updateWebResources(eq(fieldName), getterCaptor.capture(), same(rootAbout), any()); assertEquals(Collections.singletonList(aboutValue), - getterCaptor.getValue().apply(testEntity).stream().map(WebResource::getAbout) - .collect(Collectors.toList())); + 
getterCaptor.getValue().apply(testEntity).stream().map(WebResource::getAbout).toList()); } <F extends AbstractEdmEntity, A> void testReferencedEntitiesPropertyUpdate( @@ -153,8 +151,7 @@ <F extends AbstractEdmEntity, A> void testReferencedEntitiesPropertyUpdate( .updateReferencedEntities(eq(fieldName), getterCaptor.capture(), ancestorInfoCreatorCaptor.capture(), updaterCaptor.capture()); assertEquals(Collections.singletonList(aboutValue), - getterCaptor.getValue().apply(testEntity).stream().map(AbstractEdmEntity::getAbout) - .collect(Collectors.toList())); + getterCaptor.getValue().apply(testEntity).stream().map(AbstractEdmEntity::getAbout).toList()); if (ancestorInfoType == null) { assertNull(ancestorInfoCreatorCaptor.getValue().apply(testEntity)); } else { diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoIndexerTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoIndexerTest.java index af52418abc..a086a85590 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoIndexerTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/mongo/MongoIndexerTest.java @@ -151,9 +151,10 @@ MongoClient mongoClient(MongoProperties mongoProperties) throws Exception { */ @Bean MongoIndexingSettings mongoIndexingSettings(MongoProperties mongoProperties, @Value("${mongo.db}") String mongoDatabase, - @Value("${mongo.redirect.db}") String mongoRedirectDatabase) throws SetupRelatedIndexingException { + @Value("${mongo.tombstone.db}") String mongoTombstoneDatabase, @Value("${mongo.redirect.db}") String mongoRedirectDatabase) throws SetupRelatedIndexingException { MongoIndexingSettings mongoIndexingSettings = new MongoIndexingSettings(mongoProperties); mongoIndexingSettings.setMongoDatabaseName(mongoDatabase); + mongoIndexingSettings.setMongoTombstoneDatabaseName(mongoTombstoneDatabase); mongoIndexingSettings.setRecordRedirectDatabaseName(mongoRedirectDatabase); IndexingProperties indexingProperties = new 
IndexingProperties(Date.from(Instant.now()), true, diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrDocumentPopulatorTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrDocumentPopulatorTest.java index afdf3c104a..58ae1194b3 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrDocumentPopulatorTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrDocumentPopulatorTest.java @@ -1,8 +1,14 @@ package eu.europeana.indexing.solr; import static eu.europeana.indexing.solr.EdmLabel.COVERAGE_LOCATION_WGS; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE_BEGIN; +import static eu.europeana.indexing.solr.EdmLabel.CREATED_DATE_END; import static eu.europeana.indexing.solr.EdmLabel.CURRENT_LOCATION_WGS; import static eu.europeana.indexing.solr.EdmLabel.EUROPEANA_ID; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE_BEGIN; +import static eu.europeana.indexing.solr.EdmLabel.ISSUED_DATE_END; import static eu.europeana.indexing.solr.EdmLabel.LOCATION_WGS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -46,6 +52,7 @@ void populateWithProperties_PlaceCoordinates() throws Exception { final SolrInputDocument document = new SolrInputDocument(); documentPopulator.populateWithProperties(document, fullBean); documentPopulator.populateWithFacets(document, rdfWrapper); + documentPopulator.populateWithDateRanges(document, rdfWrapper); assertTrue(document.get(EUROPEANA_ID.toString()).getValues().contains(fullBean.getAbout())); assertEquals( "2", document.getFieldValue(EdmLabel.CONTENT_TIER.toString())); @@ -77,6 +84,7 @@ void populateWithProperties_WGS84Coordinates() throws Exception { final SolrInputDocument document = new SolrInputDocument(); 
documentPopulator.populateWithProperties(document, fullBean); documentPopulator.populateWithFacets(document, rdfWrapper); + documentPopulator.populateWithDateRanges(document, rdfWrapper); assertTrue(document.get(EUROPEANA_ID.toString()).getValues().contains(fullBean.getAbout())); assertTrue(CollectionUtils.isEqualCollection(document.get(CURRENT_LOCATION_WGS.toString()).getValues(), @@ -86,4 +94,34 @@ void populateWithProperties_WGS84Coordinates() throws Exception { assertTrue(CollectionUtils.isEqualCollection(document.get(LOCATION_WGS.toString()).getValues(), List.of("50,50", "40,40", "40.123456,40.1234567", "50.75,4.5"))); } + + @Test + void populateWithDateRanges() throws Exception { + String xml = IOUtils.toString(new FileInputStream("src/test/resources/europeana_record_with_normalized_date_timespan.xml"), + StandardCharsets.UTF_8); + final RDF rdf = new RdfConversionUtils().convertStringToRdf(xml); + + // Perform the tier classification + final RdfWrapper rdfWrapper = new RdfWrapper(rdf); + + final RdfToFullBeanConverter fullBeanConverter = new RdfToFullBeanConverter(); + final FullBeanImpl fullBean = fullBeanConverter.convertRdfToFullBean(rdfWrapper); + + // Create Solr document. 
+ final SolrDocumentPopulator documentPopulator = new SolrDocumentPopulator(); + final SolrInputDocument document = new SolrInputDocument(); + documentPopulator.populateWithProperties(document, fullBean); + documentPopulator.populateWithFacets(document, rdfWrapper); + documentPopulator.populateWithDateRanges(document, rdfWrapper); + + assertTrue(document.get(EUROPEANA_ID.toString()).getValues().contains(fullBean.getAbout())); + assertTrue(CollectionUtils.isEqualCollection(document.get(CREATED_DATE.toString()).getValues(), + List.of("[1426-01-01 TO 1450-12-31]", "[1942-01-01 TO 1942-12-31]"))); + assertEquals("1426-01-01", document.get(CREATED_DATE_BEGIN.toString()).getValue()); + assertEquals("1942-12-31", document.get(CREATED_DATE_END.toString()).getValue()); + assertTrue(CollectionUtils.isEqualCollection(document.get(ISSUED_DATE.toString()).getValues(), + List.of("[1942-01-01 TO 1942-12-31]"))); + assertEquals("1942-01-01", document.get(ISSUED_DATE_BEGIN.toString()).getValue()); + assertEquals("1942-12-31", document.get(ISSUED_DATE_END.toString()).getValue()); + } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrIndexerTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrIndexerTest.java index 4925d987b6..e91d59224a 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrIndexerTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/solr/SolrIndexerTest.java @@ -85,7 +85,7 @@ private SolrDocument flushAndAssertDocumentInSolr(String expectedId, int expecte assertThatDocumentFieldsExist(document); }); assertEquals(expectedSize, documents.size()); - return documents.get(0); + return documents.getFirst(); } private void assertThatDocumentFieldsExist(SolrDocument document) { diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/solr/facet/FacetEncoderTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/solr/facet/FacetEncoderTest.java index 0cfbecc9fa..f9e91d3a2d 100644 --- 
a/metis-indexing/src/test/java/eu/europeana/indexing/solr/facet/FacetEncoderTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/solr/facet/FacetEncoderTest.java @@ -227,7 +227,7 @@ void getTextFacetSearchCodes() { void getFacetFilterCodes() { List<WebResourceWrapper> webResourceWrappers = getBasicWebResourceWrappers(); - Set<Integer> result = encoder.getFacetFilterCodes(webResourceWrappers.get(0)); + Set<Integer> result = encoder.getFacetFilterCodes(webResourceWrappers.getFirst()); assertEquals(0, result.size()); } @@ -265,7 +265,7 @@ void getTextFacetFilterCodes() { void getFacetValueCodes() { List<WebResourceWrapper> webResourceWrappers = getBasicWebResourceWrappers(); - Set<Integer> result = encoder.getFacetValueCodes(webResourceWrappers.get(0)); + Set<Integer> result = encoder.getFacetValueCodes(webResourceWrappers.getFirst()); assertEquals(0, result.size()); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/EuropeanaAggregationSolrCreatorTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/EuropeanaAggregationSolrCreatorTest.java index da27b1ff2d..bdc31e825d 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/EuropeanaAggregationSolrCreatorTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/EuropeanaAggregationSolrCreatorTest.java @@ -7,14 +7,11 @@ import eu.europeana.corelib.definitions.edm.entity.EuropeanaAggregation; import eu.europeana.corelib.definitions.edm.entity.License; -import eu.europeana.corelib.definitions.edm.entity.QualityAnnotation; import eu.europeana.corelib.definitions.edm.entity.WebResource; import eu.europeana.corelib.solr.entity.EuropeanaAggregationImpl; import eu.europeana.corelib.solr.entity.LicenseImpl; -import eu.europeana.corelib.solr.entity.QualityAnnotationImpl; import eu.europeana.corelib.solr.entity.WebResourceImpl; import eu.europeana.indexing.solr.EdmLabel; -import eu.europeana.indexing.utils.RdfTier; 
import java.util.List; import java.util.Map; import org.apache.solr.common.SolrInputDocument; diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/PropertySolrCreatorTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/PropertySolrCreatorTest.java index 27ca6cd819..3686d7dc03 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/PropertySolrCreatorTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/solr/property/PropertySolrCreatorTest.java @@ -2,6 +2,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -15,19 +16,13 @@ /** * Unit test for {@link ProxySolrCreator} class */ -public class PropertySolrCreatorTest { +class PropertySolrCreatorTest { @Test - public void testAddAllToDocument() { + void testAddAllToDocument() { - // Create property creator that does nothing - final PropertySolrCreator<PlaceImpl> creator = spy(new PropertySolrCreator<PlaceImpl>() { - - @Override - public void addToDocument(SolrInputDocument document, PlaceImpl property) { - } - - }); + final PropertySolrCreator<PlaceImpl> creator = spy(PropertySolrCreator.class); + doNothing().when(creator).addToDocument(any(SolrInputDocument.class), any(PlaceImpl.class)); // Create two properties that will be used, and one that won't. 
final PlaceImpl property1 = new PlaceImpl(); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/media/EmbeddableMediaTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/media/EmbeddableMediaTest.java index 5d03c2b8d2..d28baed8d7 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/media/EmbeddableMediaTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/media/EmbeddableMediaTest.java @@ -1,11 +1,15 @@ package eu.europeana.indexing.tiers.media; +import static org.apache.commons.lang3.StringUtils.isNoneBlank; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import eu.europeana.indexing.utils.RdfWrapper; +import eu.europeana.metis.schema.jibx.HasMimeType; +import eu.europeana.metis.schema.jibx.WebResourceType; +import java.util.List; import java.util.Set; import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; @@ -17,51 +21,77 @@ */ class EmbeddableMediaTest { + private static WebResourceType getResource(String mimeType) { + WebResourceType webResourceType = new WebResourceType(); + if (isNoneBlank(mimeType)) { + HasMimeType hasMimeTypeXml = new HasMimeType(); + hasMimeTypeXml.setHasMimeType(mimeType); + webResourceType.setHasMimeType(hasMimeTypeXml); + } + + return webResourceType; + } + + private static Stream<Arguments> embeddableMedia() { + return Stream.of( + Arguments.of("http://sounds.bl.uk/embed/", true, List.of()), + + Arguments.of("http://eusounds.ait.co.at/player/", true, List.of()), + Arguments.of("http://www.dismarc.org/player/", true, List.of()), + + Arguments.of("http://api.picturepipe.net/api/html/widgets/public/playout_cloudfront?token=", true, List.of()), + Arguments.of("http://archives.crem-cnrs.fr/archives/items/", true, List.of()), + Arguments.of("http://www.ccma.cat/tv3/alacarta/programa/titol/video/", true, 
List.of()), + Arguments.of("http://www.ina.fr/*/video/", true, List.of()), + Arguments.of("http://www.ina.fr/video/", true, List.of()), + Arguments.of("http://www.theeuropeanlibrary.org/tel4/newspapers/issue/fullscreen/", true, List.of()), + Arguments.of("https://api.picturepipe.net/api/html/widgets/public/playout_cloudfront?token=", true, List.of()), + + Arguments.of("http://www.euscreen.eu/item.html", true, List.of()), + Arguments.of("https://www.euscreen.eu/item.html*", true, List.of()), + + Arguments.of("https://sketchfab.com/3d-models", true, List.of()), + Arguments.of("https://sketchfab.com/models/", true, List.of()), + Arguments.of("https://skfb.ly/", true, List.of()), + + Arguments.of("http://soundcloud.com/", true, List.of()), + Arguments.of("https://soundcloud.com/", true, List.of()), + + Arguments.of("http://player.vimeo.com/video/", true, List.of()), + Arguments.of("http://vimeo.com/", true, List.of()), + Arguments.of("https://player.vimeo.com/video/", true, List.of()), + Arguments.of("https://vimeo.com/", true, List.of()), + + Arguments.of("https://*.youtube.com/v/", true, List.of()), + Arguments.of("https://*.youtube.com/watch", true, List.of()), + Arguments.of("https://youtu.be/", true, List.of()), + Arguments.of("https://www.google.com", false, List.of()), + Arguments.of("https://get.webgl.org/", false, List.of()), + Arguments.of("https://getemoji.com/", false, List.of()), + Arguments.of("https://www.cssfontstack.com/", false, List.of()), + Arguments.of("https://api64.ipify.org/?format=json", false, List.of()), + + Arguments.of("https://oembed.com/api/oembed.xml?url=https%3A%2F%2Fvimeo.com%2F24416915", true, + List.of(getResource("application/xml+oembed"))), + Arguments.of("https://oembed.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F24416915", true, + List.of(getResource("application/json+oembed"))), + Arguments.of("https://oembed.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F24416915", false, + List.of(getResource(null))), + 
Arguments.of("https://oembed.com/api/oembed?url=https%3A%2F%2Fvimeo.com%2F24416915", false, + List.of(getResource("image/jpeg"))), + Arguments.of("https://oembed.com/api/oembed?url=https%3A%2F%2Fvimeo.com%2F24416915", false, + List.of(getResource("video/mp4"))) + ); + } + @ParameterizedTest @MethodSource("embeddableMedia") - void hasEmbeddableMedia(String url, boolean expectedEmbeddable) { + void hasEmbeddableMedia(String url, boolean expectedEmbeddable, List<WebResourceType> resourceTypeList) { + final RdfWrapper entity = mock(RdfWrapper.class); when(entity.getUrlsOfTypes(any())).thenReturn(Set.of(url)); - assertEquals(expectedEmbeddable, EmbeddableMedia.hasEmbeddableMedia(entity)); - } + when(entity.getWebResources()).thenReturn(resourceTypeList); - private static Stream<Arguments> embeddableMedia() { - return Stream.of(Arguments.of("http://sounds.bl.uk/embed/", true), - - Arguments.of("http://eusounds.ait.co.at/player/", true), - Arguments.of("http://www.dismarc.org/player/", true), - - Arguments.of("http://api.picturepipe.net/api/html/widgets/public/playout_cloudfront?token=", true), - Arguments.of("http://archives.crem-cnrs.fr/archives/items/", true), - Arguments.of("http://www.ccma.cat/tv3/alacarta/programa/titol/video/", true), - Arguments.of("http://www.ina.fr/*/video/", true), - Arguments.of("http://www.ina.fr/video/", true), - Arguments.of("http://www.theeuropeanlibrary.org/tel4/newspapers/issue/fullscreen/", true), - Arguments.of("https://api.picturepipe.net/api/html/widgets/public/playout_cloudfront?token=", true), - - Arguments.of("http://www.euscreen.eu/item.html", true), - Arguments.of("https://www.euscreen.eu/item.html*", true), - - Arguments.of("https://sketchfab.com/3d-models", true), - Arguments.of("https://sketchfab.com/models/", true), - Arguments.of("https://skfb.ly/", true), - - Arguments.of("http://soundcloud.com/", true), - Arguments.of("https://soundcloud.com/", true), - - Arguments.of("http://player.vimeo.com/video/", true), - 
Arguments.of("http://vimeo.com/", true), - Arguments.of("https://player.vimeo.com/video/", true), - Arguments.of("https://vimeo.com/", true), - - Arguments.of("https://*.youtube.com/v/", true), - Arguments.of("https://*.youtube.com/watch", true), - Arguments.of("https://youtu.be/", true), - - Arguments.of("https://www.google.com", false), - Arguments.of("https://get.webgl.org/", false), - Arguments.of("https://getemoji.com/", false), - Arguments.of("https://www.cssfontstack.com/", false), - Arguments.of("https://api64.ipify.org/?format=json", false)); + assertEquals(expectedEmbeddable, EmbeddableMedia.hasEmbeddableMedia(entity)); } } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ContextualClassesBreakdownClassifierTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ContextualClassesBreakdownClassifierTest.java index 45552f9541..7efcb3b87c 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ContextualClassesBreakdownClassifierTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ContextualClassesBreakdownClassifierTest.java @@ -246,7 +246,7 @@ void testEntityQualifiesForConcept() { // Set values concept.setChoiceList( - IntStream.range(0, 7).mapToObj(index -> new Choice()).collect(Collectors.toList())); + IntStream.range(0, 7).mapToObj(index -> new Choice()).toList()); concept.getChoiceList().get(0).setPrefLabel(prefLabel); concept.getChoiceList().get(1).setNote(note); concept.getChoiceList().get(2).setBroader(broader); @@ -413,11 +413,11 @@ void countQualifyingContextualClassTypes() { final String unlinkedPlaceAbout = "unlinkedPlaceAbout"; final String existingTimespanAbout = "existingTimeSpanAbout"; final String absentTimespanAbout = "absentTimeSpanAbout"; - agents.get(0).setAbout(agentAbout); - concepts.get(0).setAbout(conceptAbout); + agents.getFirst().setAbout(agentAbout); + concepts.getFirst().setAbout(conceptAbout); 
places.get(0).setAbout(linkedPlaceAbout); places.get(1).setAbout(unlinkedPlaceAbout); - timeSpans.get(0).setAbout(existingTimespanAbout); + timeSpans.getFirst().setAbout(existingTimespanAbout); // Create links to most objects and check that they are indeed obtainable. final List<ProxyType> proxies = Arrays.asList(new ProxyType(), new ProxyType()); @@ -438,31 +438,31 @@ void countQualifyingContextualClassTypes() { .collect(Collectors.toSet())); // Do the tests for no qualifying entities (except the unlinked one). - doReturn(false).when(classifier).entityQualifies(agents.get(0)); - doReturn(false).when(classifier).entityQualifies(concepts.get(0)); + doReturn(false).when(classifier).entityQualifies(agents.getFirst()); + doReturn(false).when(classifier).entityQualifies(concepts.getFirst()); doReturn(false).when(classifier).entityQualifies(places.get(0)); doReturn(true).when(classifier).entityQualifies(places.get(1)); - doReturn(false).when(classifier).entityQualifies(timeSpans.get(0)); + doReturn(false).when(classifier).entityQualifies(timeSpans.getFirst()); assertEquals(0, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); // Make some of them qualifying and do the tests again. 
- doReturn(true).when(classifier).entityQualifies(agents.get(0)); + doReturn(true).when(classifier).entityQualifies(agents.getFirst()); assertEquals(1, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(true).when(classifier).entityQualifies(concepts.get(0)); + doReturn(true).when(classifier).entityQualifies(concepts.getFirst()); assertEquals(2, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(true).when(classifier).entityQualifies(places.get(0)); + doReturn(true).when(classifier).entityQualifies(places.getFirst()); assertEquals(3, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(true).when(classifier).entityQualifies(timeSpans.get(0)); + doReturn(true).when(classifier).entityQualifies(timeSpans.getFirst()); assertEquals(4, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); // Make some of them non-qualifying and do the tests again. 
- doReturn(false).when(classifier).entityQualifies(agents.get(0)); + doReturn(false).when(classifier).entityQualifies(agents.getFirst()); assertEquals(3, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(false).when(classifier).entityQualifies(concepts.get(0)); + doReturn(false).when(classifier).entityQualifies(concepts.getFirst()); assertEquals(2, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(false).when(classifier).entityQualifies(places.get(0)); + doReturn(false).when(classifier).entityQualifies(places.getFirst()); assertEquals(1, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); - doReturn(false).when(classifier).entityQualifies(timeSpans.get(0)); + doReturn(false).when(classifier).entityQualifies(timeSpans.getFirst()); assertEquals(0, classifier.countQualifyingContextualClassTypes(entity).getCompleteContextualResources()); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementTest.java index 201c601c93..b57726bc42 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementTest.java @@ -45,7 +45,7 @@ private <T extends ResourceOrLiteralType> void testChoiceElementWithFixedGroup(E final BiConsumer<ProxyType, T> wrappedSetter = (proxy, value) -> { proxy.setChoiceList(new ArrayList<>()); proxy.getChoiceList().add(new Choice()); - setter.accept(proxy.getChoiceList().get(0), value); + setter.accept(proxy.getChoiceList().getFirst(), value); }; testElementWithFixedGroup(element, group, constructor, wrappedSetter); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementsBreakdownClassifierTest.java 
b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementsBreakdownClassifierTest.java index e01ccc7f1a..b51b4dc773 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementsBreakdownClassifierTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/EnablingElementsBreakdownClassifierTest.java @@ -33,7 +33,6 @@ import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; -import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; @@ -190,7 +189,7 @@ void testAnalyzeForElements(ClassifierMode classifierMode) { hasMet.setResource(link); final ProxyType proxy = new ProxyType(); proxy.setChoiceList(Collections.singletonList(new Choice())); - proxy.getChoiceList().get(0).setCreated(created); + proxy.getChoiceList().getFirst().setCreated(created); proxy.setHasMetList(Collections.singletonList(hasMet)); final List<ProxyType> proxies = Collections.singletonList(proxy); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageClassifierTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageClassifierTest.java index aa7a626811..9d47a0a684 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageClassifierTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageClassifierTest.java @@ -27,7 +27,6 @@ import java.util.Collections; import java.util.List; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.EnumSource; @@ -151,7 +150,7 @@ void testAddProxyToStatistics(ClassifierMode classifierMode) { verify(statistics, times(1)) .addToStatistics(same(proxy.getIsRelatedToList()), 
eq(PropertyType.EDM_IS_RELATED_TO)); verify(statistics, times(1)) - .addToStatistics(same(proxy.getChoiceList().get(0))); + .addToStatistics(same(proxy.getChoiceList().getFirst())); verify(statistics, times(1)).addToStatistics(isNull()); verifyNoMoreInteractions(statistics); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageTagStatisticsTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageTagStatisticsTest.java index 12c047508d..f8b21b22e1 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageTagStatisticsTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/LanguageTagStatisticsTest.java @@ -87,11 +87,11 @@ void testConstruction() { assertTrue(statistics1.containsContextualClass(about3)); assertTrue(statistics1.containsContextualClass(about4)); - // Now make some preflabels invalid. - place1.getPrefLabelList().get(0).setLang(new Lang()); - place2.getPrefLabelList().get(0).setLang(null); - timeSpan.getPrefLabelList().get(0).setString(" "); - concept.getChoiceList().get(0).getPrefLabel().getLang().setLang(" "); + // Now make some prefLabels invalid. 
+ place1.getPrefLabelList().getFirst().setLang(new Lang()); + place2.getPrefLabelList().getFirst().setLang(null); + timeSpan.getPrefLabelList().getFirst().setString(" "); + concept.getChoiceList().getFirst().getPrefLabel().getLang().setLang(" "); final LanguageTagStatistics statistics2 = new LanguageTagStatistics( Arrays.asList(place1, place2), Collections.singletonList(timeSpan), Collections.singletonList(concept)); @@ -102,13 +102,13 @@ void testConstruction() { Collections.singletonList(concept)); assertEquals(Collections.emptySet(), statistics3.getContextualClassesWithLanguage()); - // Now make some preflabel collections invalid + // Now make some prefLabel collections invalid place1.setPrefLabelList(Collections.emptyList()); place2.setPrefLabelList(null); concept.getChoiceList().forEach(Concept.Choice::clearChoiceListSelect); - concept.getChoiceList().get(0).setAltLabel(new AltLabel()); - concept.getChoiceList().get(0).getAltLabel().setString("altLabelValue"); - concept.getChoiceList().get(0).getAltLabel().setLang(createLang("altLabelLanguage")); + concept.getChoiceList().getFirst().setAltLabel(new AltLabel()); + concept.getChoiceList().getFirst().getAltLabel().setString("altLabelValue"); + concept.getChoiceList().getFirst().getAltLabel().setLang(createLang("altLabelLanguage")); concept.getChoiceList().get(1).setAltLabel(null); final LanguageTagStatistics statistics4 = new LanguageTagStatistics( Arrays.asList(place1, place2), Collections.singletonList(timeSpan), @@ -301,10 +301,9 @@ void testAddToStatisticsForResourceOrLiteralTypes() { final PropertyType propertyType = PropertyType.DC_FORMAT; // Test sanity check - assertThrows(IllegalArgumentException.class, - () -> statistics.addToStatistics(Arrays.asList(valid1, valid2), null)); - assertThrows(IllegalArgumentException.class, - () -> statistics.addToStatistics((List<ResourceOrLiteralType>) null, null)); + final List<ResourceOrLiteralType> resourceOrLiteralTypes = Arrays.asList(valid1, valid2); + 
assertThrows(IllegalArgumentException.class, () -> statistics.addToStatistics(resourceOrLiteralTypes, null)); + assertThrows(IllegalArgumentException.class, () -> statistics.addToStatistics((List<ResourceOrLiteralType>) null, null)); statistics.addToStatistics((List<ResourceOrLiteralType>) null, propertyType); verify(statistics, never()).addToStatistics(any(ResourceOrLiteralType.class), any()); diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ResourceLinkFromProxyTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ResourceLinkFromProxyTest.java index b46329ffde..035021b5cf 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ResourceLinkFromProxyTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/tiers/metadata/ResourceLinkFromProxyTest.java @@ -59,7 +59,6 @@ import java.util.List; import java.util.function.BiConsumer; import java.util.function.Supplier; -import java.util.stream.Collectors; import org.junit.jupiter.api.Test; class ResourceLinkFromProxyTest { @@ -95,8 +94,7 @@ private <T extends ResourceType> void testGetLink(ResourceLinkFromProxy resource // Test with actual value final String testResource = "test resource"; value.setResource(testResource); - final List<String> links = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + final List<String> links = resource.getLinkAndValueGetter().getLinks(proxy).toList(); assertEquals(Collections.singletonList(testResource), links); assertNoValues(proxy, resource); } @@ -144,15 +142,13 @@ private <T extends ResourceType> void testGetLinks(ResourceLinkFromProxy resourc final String testResource2 = "test resource 2"; value1.setResource(testResource1); value2.setResource(testResource2); - final List<String> linksDifferent = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + final List<String> linksDifferent = 
resource.getLinkAndValueGetter().getLinks(proxy).toList(); assertEquals(Arrays.asList(testResource1, testResource2), linksDifferent); assertNoValues(proxy, resource); // Test with the same values value2.setResource(testResource1); - final List<String> linksSame = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + final List<String> linksSame = resource.getLinkAndValueGetter().getLinks(proxy).toList(); assertEquals(Arrays.asList(testResource1, testResource1), linksSame); assertNoValues(proxy, resource); } @@ -193,9 +189,9 @@ private <T extends ResourceOrLiteralType> void testGetLinkAndValue(ResourceLinkF value.getResource().setResource(testResource); value.setString(testLiteral); final List<String> links = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + .getLinks(proxy).toList(); final List<String> values = resource.getLinkAndValueGetter() - .getValues(proxy).collect(Collectors.toList()); + .getValues(proxy).toList(); assertEquals(Collections.singletonList(testResource), links); assertEquals(Collections.singletonList(testLiteral), values); } @@ -242,20 +238,20 @@ private <T extends ResourceOrLiteralType> void testGetLinksAndValues( value1.setString(testLiteral1); value2.setString(testLiteral2); final List<String> linksDifferent = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + .getLinks(proxy).toList(); assertEquals(Arrays.asList(testResource1, testResource2), linksDifferent); final List<String> valuesDifferent = resource.getLinkAndValueGetter() - .getValues(proxy).collect(Collectors.toList()); + .getValues(proxy).toList(); assertEquals(Arrays.asList(testLiteral1, testLiteral2), valuesDifferent); // Test with the same values value2.getResource().setResource(testResource1); value2.setString(testLiteral1); final List<String> linksSame = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + .getLinks(proxy).toList(); 
assertEquals(Arrays.asList(testResource1, testResource1), linksSame); final List<String> valuesSame = resource.getLinkAndValueGetter() - .getValues(proxy).collect(Collectors.toList()); + .getValues(proxy).toList(); assertEquals(Arrays.asList(testLiteral1, testLiteral1), valuesSame); } @@ -359,20 +355,20 @@ private <T extends ResourceOrLiteralType> void testGetLinksAndValuesInChoice( value1.setString(testLiteral1); value2.setString(testLiteral2); final List<String> linksDifferent = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + .getLinks(proxy).toList(); assertEquals(Arrays.asList(testResource1, testResource2), linksDifferent); final List<String> valuesDifferent = resource.getLinkAndValueGetter() - .getValues(proxy).collect(Collectors.toList()); + .getValues(proxy).toList(); assertEquals(Arrays.asList(testLiteral1, testLiteral2), valuesDifferent); // Test with the same values value2.getResource().setResource(testResource1); value2.setString(testLiteral1); final List<String> linksSame = resource.getLinkAndValueGetter() - .getLinks(proxy).collect(Collectors.toList()); + .getLinks(proxy).toList(); assertEquals(Arrays.asList(testResource1, testResource1), linksSame); final List<String> valuesSame = resource.getLinkAndValueGetter() - .getValues(proxy).collect(Collectors.toList()); + .getValues(proxy).toList(); assertEquals(Arrays.asList(testLiteral1, testLiteral1), valuesSame); } diff --git a/metis-indexing/src/test/java/eu/europeana/indexing/utils/RdfWrapperTest.java b/metis-indexing/src/test/java/eu/europeana/indexing/utils/RdfWrapperTest.java index 7902f680a3..6fd9088c24 100644 --- a/metis-indexing/src/test/java/eu/europeana/indexing/utils/RdfWrapperTest.java +++ b/metis-indexing/src/test/java/eu/europeana/indexing/utils/RdfWrapperTest.java @@ -9,14 +9,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -import java.util.Arrays; -import java.util.Collections; 
-import java.util.EnumSet; -import java.util.List; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; + import eu.europeana.metis.schema.jibx.AboutType; import eu.europeana.metis.schema.jibx.AgentType; import eu.europeana.metis.schema.jibx.Aggregation; @@ -32,6 +25,13 @@ import eu.europeana.metis.schema.jibx.Service; import eu.europeana.metis.schema.jibx.TimeSpanType; import eu.europeana.metis.schema.jibx.WebResourceType; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import org.junit.jupiter.api.Test; class RdfWrapperTest { @@ -135,8 +135,7 @@ void testGetWebResourcesWithProcessing() { final RDF rdf = mock(RDF.class); when(rdf.getWebResourceList()).thenReturn(Arrays.asList(entity0, entity1, entity2, null)); assertEquals(Collections.singletonList(entity1.getAbout()), - new RdfWrapper(rdf).getWebResources().stream().map(WebResourceType::getAbout) - .collect(Collectors.toList())); + new RdfWrapper(rdf).getWebResources().stream().map(WebResourceType::getAbout).toList()); // Test rdf that returns null when(rdf.getWebResourceList()).thenReturn(null); diff --git a/metis-indexing/src/test/resources/europeana_record_tier_calculation_rdf.xml b/metis-indexing/src/test/resources/europeana_record_tier_calculation_rdf.xml new file mode 100644 index 0000000000..948f717386 --- /dev/null +++ b/metis-indexing/src/test/resources/europeana_record_tier_calculation_rdf.xml @@ -0,0 +1,157 @@ +<?xml version="1.0" encoding="UTF-8"?> +<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:dcterms="http://purl.org/dc/terms/" xmlns:edm="http://www.europeana.eu/schemas/edm/" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:skos="http://www.w3.org/2004/02/skos/core#" + xmlns:foaf="http://xmlns.com/foaf/0.1/" 
xmlns:ebucore="http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#" + xmlns:ore="http://www.openarchives.org/ore/terms/" +> + <edm:ProvidedCHO rdf:about="/305/_nnhSX08"/> + <edm:WebResource rdf:about="http://mbc.malopolska.pl/Content/48386/d2j:big,0/0437_0001.djvu.jpg"> + <edm:rights rdf:resource="http://rightsstatements.org/vocab/InC/1.0/"/> + <ebucore:hasMimeType>image/jpeg</ebucore:hasMimeType> + <ebucore:fileByteSize rdf:datatype="http://www.w3.org/2001/XMLSchema#long">112031</ebucore:fileByteSize> + <ebucore:width rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">562</ebucore:width> + <ebucore:height rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">761</ebucore:height> + <edm:hasColorSpace>sRGB</edm:hasColorSpace> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#C0C0C0</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#A9A9A9</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#BDB76B</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#BC8F8F</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#D2B48C</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#808080</edm:componentColor> + <ebucore:orientation rdf:datatype="http://www.w3.org/2001/XMLSchema#string">portrait</ebucore:orientation> + </edm:WebResource> + <edm:WebResource rdf:about="http://fbc.pionier.net.pl/id/oai:mbc.malopolska.pl:48386"> + <ebucore:hasMimeType>text/html</ebucore:hasMimeType> + <ebucore:fileByteSize rdf:datatype="http://www.w3.org/2001/XMLSchema#long">1477</ebucore:fileByteSize> + </edm:WebResource> + <edm:TimeSpan rdf:about="#1923"> + <skos:prefLabel xml:lang="zxx">1923</skos:prefLabel> + <dcterms:isPartOf rdf:resource="http://data.europeana.eu/timespan/20"></dcterms:isPartOf> + 
<edm:begin>1923-01-01</edm:begin> + <edm:end>1923-12-31</edm:end> + <skos:notation rdf:datatype="http://id.loc.gov/datatypes/edtf/EDTF-level1">1923</skos:notation> + </edm:TimeSpan> + <edm:TimeSpan rdf:about="http://data.europeana.eu/timespan/20"> + <skos:prefLabel xml:lang="de">20. Jahrhundert</skos:prefLabel> + <skos:prefLabel xml:lang="fi">1900-luku</skos:prefLabel> + <skos:prefLabel xml:lang="ru">XX век</skos:prefLabel> + <skos:prefLabel xml:lang="pt">Século XX</skos:prefLabel> + <skos:prefLabel xml:lang="bg">20 век</skos:prefLabel> + <skos:prefLabel xml:lang="lt">XX amžius</skos:prefLabel> + <skos:prefLabel xml:lang="hr">20. stoljeće</skos:prefLabel> + <skos:prefLabel xml:lang="lv">20. gadsimts</skos:prefLabel> + <skos:prefLabel xml:lang="fr">XXe siècle</skos:prefLabel> + <skos:prefLabel xml:lang="hu">20. század</skos:prefLabel> + <skos:prefLabel xml:lang="sk">20. storočie</skos:prefLabel> + <skos:prefLabel xml:lang="sl">20. stoletje</skos:prefLabel> + <skos:prefLabel xml:lang="ga">20ú haois</skos:prefLabel> + <skos:prefLabel xml:lang="ca">Segle XX</skos:prefLabel> + <skos:prefLabel xml:lang="sv">1900-talet</skos:prefLabel> + <skos:prefLabel xml:lang="el">20ός αιώνας</skos:prefLabel> + <skos:prefLabel xml:lang="en">20th century</skos:prefLabel> + <skos:prefLabel xml:lang="it">XX secolo</skos:prefLabel> + <skos:prefLabel xml:lang="es">Siglo XX</skos:prefLabel> + <skos:prefLabel xml:lang="et">20. sajand</skos:prefLabel> + <skos:prefLabel xml:lang="cs">20. století</skos:prefLabel> + <skos:prefLabel xml:lang="eu">XX. mendea</skos:prefLabel> + <skos:prefLabel xml:lang="pl">XX wiek</skos:prefLabel> + <skos:prefLabel xml:lang="da">20. 
århundrede</skos:prefLabel> + <skos:prefLabel xml:lang="ro">Secolul al XX-lea</skos:prefLabel> + <skos:prefLabel xml:lang="nl">20e eeuw</skos:prefLabel> + <skos:altLabel xml:lang="sv">20:e århundradet</skos:altLabel> + <skos:altLabel xml:lang="sv">20:e seklet</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-tal</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-talet (århundrade)</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-talet (sekel)</skos:altLabel> + <skos:altLabel xml:lang="ru">20 век</skos:altLabel> + <skos:altLabel xml:lang="pt">Século 20</skos:altLabel> + <skos:altLabel xml:lang="pt">Século vinte</skos:altLabel> + <skos:altLabel xml:lang="pt">Periodo 1901-2000</skos:altLabel> + <skos:altLabel xml:lang="pt">Ciclo (1901-2000)</skos:altLabel> + <skos:altLabel xml:lang="en">20th-century</skos:altLabel> + <skos:altLabel xml:lang="en">20th-century</skos:altLabel> + <skos:altLabel xml:lang="en">Twentieth century</skos:altLabel> + <skos:altLabel xml:lang="en">The past century</skos:altLabel> + <skos:altLabel xml:lang="en">History, 20th Century</skos:altLabel> + <skos:altLabel xml:lang="en">XX Century</skos:altLabel> + <skos:altLabel xml:lang="it">Novecento</skos:altLabel> + <skos:altLabel xml:lang="it">20° secolo</skos:altLabel> + <skos:altLabel xml:lang="it">'900</skos:altLabel> + <skos:altLabel xml:lang="it">Novecento</skos:altLabel> + <skos:altLabel xml:lang="fr">20e siècle</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo 20</skos:altLabel> + <skos:altLabel xml:lang="es">El siglo pasado</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo veinte</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo XX después de Cristo</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo XX d. 
C.</skos:altLabel> + <edm:begin>1901-01-01</edm:begin> + <edm:end>2000-12-31</edm:end> + <edm:isNextInSequence rdf:resource="http://data.europeana.eu/timespan/19"/> + <owl:sameAs rdf:resource="http://www.wikidata.org/entity/Q6927"/> + <owl:sameAs rdf:resource="http://id.loc.gov/authorities/names/sh2002012476"/> + <owl:sameAs rdf:resource="http://id.loc.gov/authorities/names/sh85139020"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/D049673"/> + <owl:sameAs rdf:resource="https://www.freebase.com/m/089_x"/> + <owl:sameAs rdf:resource="https://g.co/kg/m/089_x"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/K01.400.504.968"/> + <owl:sameAs rdf:resource="http://vocab.getty.edu/aat/300404514"/> + <owl:sameAs rdf:resource="http://id.worldcat.org/fast/1159810"/> + <owl:sameAs rdf:resource="http://dbpedia.org/resource/20th_century"/> + </edm:TimeSpan> + <ore:Aggregation rdf:about="/aggregation/provider/305/_nnhSX08"> + <edm:aggregatedCHO rdf:resource="/305/_nnhSX08"/> + <edm:dataProvider rdf:resource="http://data.europeana.eu/organization/2897"></edm:dataProvider> + <edm:isShownAt rdf:resource="http://fbc.pionier.net.pl/id/oai:mbc.malopolska.pl:48386"/> + <edm:object rdf:resource="http://mbc.malopolska.pl/Content/48386/d2j:big,0/0437_0001.djvu.jpg"/> + <edm:provider rdf:resource="http://data.europeana.eu/organization/1566"></edm:provider> + <edm:rights rdf:resource="http://rightsstatements.org/vocab/InC/1.0/"/> + </ore:Aggregation> + <ore:Proxy rdf:about="/proxy/provider/305/_nnhSX08"> + <dc:contributor xml:lang="pl">Beaupre, Antoni.(Red.)</dc:contributor> + <dc:date xml:lang="pl">1923</dc:date> + <dc:description xml:lang="pl">Wojewódzka Biblioteka Publiczna w Krakowie</dc:description> + <dc:format>image/vnd.djvu</dc:format> + <dc:language>pol</dc:language> + <dc:publisher xml:lang="pl">Spółka Wydawnicza "Czas"</dc:publisher> + <dc:rights xml:lang="pl">Biblioteka Książąt Czartoryskich</dc:rights> + <dc:subject xml:lang="pl">20 w.</dc:subject> + 
<dc:subject xml:lang="pl">gazety polskie</dc:subject> + <dc:title xml:lang="pl">Czas. 1923, nr 251 (10 XI)</dc:title> + <dc:type xml:lang="pl">czasopismo</dc:type> + <dcterms:extent>Dziennik polityczny i literacko-społeczny; organ konserwatystów. - Opis fiz.: 4 s. ; 61 cm.</dcterms:extent> + <edm:europeanaProxy>false</edm:europeanaProxy> + <ore:proxyFor rdf:resource="/305/_nnhSX08"/> + <ore:proxyIn rdf:resource="/aggregation/provider/305/_nnhSX08"/> + <edm:type>TEXT</edm:type> + </ore:Proxy> + <ore:Proxy rdf:about="/proxy/europeana/305/_nnhSX08"> + <dc:date rdf:resource="#1923"></dc:date> + <dc:identifier>#nnhSX08</dc:identifier> + <dc:language>pol</dc:language> + <edm:europeanaProxy>true</edm:europeanaProxy> + <edm:year>1923</edm:year> + <ore:proxyFor rdf:resource="/305/_nnhSX08"/> + <ore:proxyIn rdf:resource="/aggregation/europeana/305/_nnhSX08"/> + <ore:lineage rdf:resource="/proxy/provider/305/_nnhSX08"/> + </ore:Proxy> + <edm:EuropeanaAggregation rdf:about="/aggregation/europeana/305/_nnhSX08"> + <edm:aggregatedCHO rdf:resource="/305/_nnhSX08"/> + <edm:dataProvider xml:lang="en">Europeana Foundation</edm:dataProvider> + <edm:provider xml:lang="en">Europeana Foundation</edm:provider> + <edm:datasetName>305_local_26072024_1620</edm:datasetName> + <edm:country>Netherlands</edm:country> + <edm:preview rdf:resource="http://mbc.malopolska.pl/Content/48386/d2j:big,0/0437_0001.djvu.jpg"/> + <edm:language>nl</edm:language> + <edm:completeness>6</edm:completeness> + </edm:EuropeanaAggregation> + <foaf:Organization rdf:about="http://data.europeana.eu/organization/1566"> + <skos:prefLabel xml:lang="en">Digital Libraries Federation</skos:prefLabel> + <skos:prefLabel xml:lang="pl">Federacja Bibliotek Cyfrowych</skos:prefLabel> + </foaf:Organization> + <foaf:Organization rdf:about="http://data.europeana.eu/organization/2897"> + <skos:prefLabel xml:lang="en">Malopolska Digital Library</skos:prefLabel> + <skos:prefLabel xml:lang="pl">Małopolska Biblioteka 
Cyfrowa</skos:prefLabel> + </foaf:Organization> +</rdf:RDF> diff --git a/metis-indexing/src/test/resources/europeana_record_with_normalized_date_timespan.xml b/metis-indexing/src/test/resources/europeana_record_with_normalized_date_timespan.xml new file mode 100644 index 0000000000..758f9b6099 --- /dev/null +++ b/metis-indexing/src/test/resources/europeana_record_with_normalized_date_timespan.xml @@ -0,0 +1,431 @@ +<?xml version="1.0" encoding="UTF-8"?> +<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:dcterms="http://purl.org/dc/terms/" xmlns:edm="http://www.europeana.eu/schemas/edm/" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:skos="http://www.w3.org/2004/02/skos/core#" + xmlns:foaf="http://xmlns.com/foaf/0.1/" xmlns:ebucore="http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#" + xmlns:doap="http://usefulinc.com/ns/doap#" + xmlns:ore="http://www.openarchives.org/ore/terms/" xmlns:svcs="http://rdfs.org/sioc/services#" +> + <edm:ProvidedCHO rdf:about="/875/object_NLS2__RS_18_______2LYQ776"/> + <edm:WebResource + rdf:about="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/full/full/0/default.jpg"> + <dc:format>image/jpg</dc:format> + <dc:type>text</dc:type> + <dc:type>images</dc:type> + <dcterms:conformsTo>TEI P5 ENRICH Schema</dcterms:conformsTo> + <edm:rights rdf:resource="http://creativecommons.org/licenses/by-nc-sa/4.0/"/> + <ebucore:hasMimeType>image/jpeg</ebucore:hasMimeType> + <ebucore:fileByteSize rdf:datatype="http://www.w3.org/2001/XMLSchema#long">241107</ebucore:fileByteSize> + <ebucore:width rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">1379</ebucore:width> + <ebucore:height rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">827</ebucore:height> + <edm:hasColorSpace>sRGB</edm:hasColorSpace> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#DEB887</edm:componentColor> + <edm:componentColor 
rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#556B2F</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#9ACD32</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#CD5C5C</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#FFE4B5</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#A52A2A</edm:componentColor> + <ebucore:orientation rdf:datatype="http://www.w3.org/2001/XMLSchema#string">landscape</ebucore:orientation> + <dcterms:isReferencedBy + rdf:resource="https://collectiones.manuscriptorium.com/assorted/NLS___/NLS2__/6/NLS___-NLS2__RS_18_______2LYQ776-sr"></dcterms:isReferencedBy> + <svcs:has_service rdf:resource="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/"/> + </edm:WebResource> + <edm:WebResource + rdf:about="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/full/!400,400/0/default.jpg"> + <ebucore:hasMimeType>image/jpeg</ebucore:hasMimeType> + <ebucore:fileByteSize rdf:datatype="http://www.w3.org/2001/XMLSchema#long">26028</ebucore:fileByteSize> + <ebucore:width rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">400</ebucore:width> + <ebucore:height rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">239</ebucore:height> + <edm:hasColorSpace>sRGB</edm:hasColorSpace> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#556B2F</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#DEB887</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#2E8B57</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#BC8F8F</edm:componentColor> + <edm:componentColor 
rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#F0E68C</edm:componentColor> + <edm:componentColor rdf:datatype="http://www.w3.org/2001/XMLSchema#hexBinary">#9ACD32</edm:componentColor> + <ebucore:orientation rdf:datatype="http://www.w3.org/2001/XMLSchema#string">landscape</ebucore:orientation> + </edm:WebResource> + <edm:WebResource + rdf:about="http://www.manuscriptorium.com/apps/index.php?direct=record&pid=NLS___-NLS2__RS_18_______2LYQ776-sr"> + <ebucore:hasMimeType>text/html</ebucore:hasMimeType> + <ebucore:fileByteSize rdf:datatype="http://www.w3.org/2001/XMLSchema#long">0</ebucore:fileByteSize> + </edm:WebResource> + <edm:TimeSpan rdf:about="#1426%2F1450"> + <skos:prefLabel xml:lang="zxx">1426/1450</skos:prefLabel> + <dcterms:isPartOf rdf:resource="http://data.europeana.eu/timespan/15"></dcterms:isPartOf> + <edm:begin>1426-01-01</edm:begin> + <edm:end>1450-12-31</edm:end> + <skos:notation rdf:datatype="http://id.loc.gov/datatypes/edtf/EDTF-level1">1426/1450</skos:notation> + </edm:TimeSpan> + <edm:TimeSpan rdf:about="#1942"> + <skos:prefLabel xml:lang="zxx">1942</skos:prefLabel> + <dcterms:isPartOf rdf:resource="http://data.europeana.eu/timespan/20"></dcterms:isPartOf> + <edm:begin>1942-01-01</edm:begin> + <edm:end>1942-12-31</edm:end> + <skos:notation rdf:datatype="http://id.loc.gov/datatypes/edtf/EDTF-level1">1942</skos:notation> + </edm:TimeSpan> + <edm:TimeSpan rdf:about="http://data.europeana.eu/timespan/20"> + <skos:prefLabel xml:lang="de">20. Jahrhundert</skos:prefLabel> + <skos:prefLabel xml:lang="fi">1900-luku</skos:prefLabel> + <skos:prefLabel xml:lang="ru">XX век</skos:prefLabel> + <skos:prefLabel xml:lang="pt">Século XX</skos:prefLabel> + <skos:prefLabel xml:lang="bg">20 век</skos:prefLabel> + <skos:prefLabel xml:lang="lt">XX amžius</skos:prefLabel> + <skos:prefLabel xml:lang="hr">20. stoljeće</skos:prefLabel> + <skos:prefLabel xml:lang="lv">20. 
gadsimts</skos:prefLabel> + <skos:prefLabel xml:lang="fr">XXe siècle</skos:prefLabel> + <skos:prefLabel xml:lang="hu">20. század</skos:prefLabel> + <skos:prefLabel xml:lang="sk">20. storočie</skos:prefLabel> + <skos:prefLabel xml:lang="sl">20. stoletje</skos:prefLabel> + <skos:prefLabel xml:lang="ga">20ú haois</skos:prefLabel> + <skos:prefLabel xml:lang="ca">Segle XX</skos:prefLabel> + <skos:prefLabel xml:lang="sv">1900-talet</skos:prefLabel> + <skos:prefLabel xml:lang="el">20ός αιώνας</skos:prefLabel> + <skos:prefLabel xml:lang="en">20th century</skos:prefLabel> + <skos:prefLabel xml:lang="it">XX secolo</skos:prefLabel> + <skos:prefLabel xml:lang="es">Siglo XX</skos:prefLabel> + <skos:prefLabel xml:lang="et">20. sajand</skos:prefLabel> + <skos:prefLabel xml:lang="cs">20. století</skos:prefLabel> + <skos:prefLabel xml:lang="eu">XX. mendea</skos:prefLabel> + <skos:prefLabel xml:lang="pl">XX wiek</skos:prefLabel> + <skos:prefLabel xml:lang="da">20. århundrede</skos:prefLabel> + <skos:prefLabel xml:lang="ro">Secolul al XX-lea</skos:prefLabel> + <skos:prefLabel xml:lang="nl">20e eeuw</skos:prefLabel> + <skos:altLabel xml:lang="sv">20:e århundradet</skos:altLabel> + <skos:altLabel xml:lang="sv">20:e seklet</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-tal</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-talet (århundrade)</skos:altLabel> + <skos:altLabel xml:lang="sv">1900-talet (sekel)</skos:altLabel> + <skos:altLabel xml:lang="ru">20 век</skos:altLabel> + <skos:altLabel xml:lang="pt">Século 20</skos:altLabel> + <skos:altLabel xml:lang="pt">Século vinte</skos:altLabel> + <skos:altLabel xml:lang="pt">Periodo 1901-2000</skos:altLabel> + <skos:altLabel xml:lang="pt">Ciclo (1901-2000)</skos:altLabel> + <skos:altLabel xml:lang="en">20th-century</skos:altLabel> + <skos:altLabel xml:lang="en">20th-century</skos:altLabel> + <skos:altLabel xml:lang="en">Twentieth century</skos:altLabel> + <skos:altLabel xml:lang="en">The past century</skos:altLabel> + <skos:altLabel 
xml:lang="en">History, 20th Century</skos:altLabel> + <skos:altLabel xml:lang="en">XX Century</skos:altLabel> + <skos:altLabel xml:lang="it">Novecento</skos:altLabel> + <skos:altLabel xml:lang="it">20° secolo</skos:altLabel> + <skos:altLabel xml:lang="it">'900</skos:altLabel> + <skos:altLabel xml:lang="it">Novecento</skos:altLabel> + <skos:altLabel xml:lang="fr">20e siècle</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo 20</skos:altLabel> + <skos:altLabel xml:lang="es">El siglo pasado</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo veinte</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo XX después de Cristo</skos:altLabel> + <skos:altLabel xml:lang="es">Siglo XX d. C.</skos:altLabel> + <edm:begin>1901-01-01</edm:begin> + <edm:end>2000-12-31</edm:end> + <edm:isNextInSequence rdf:resource="http://data.europeana.eu/timespan/19"/> + <owl:sameAs rdf:resource="http://www.wikidata.org/entity/Q6927"/> + <owl:sameAs rdf:resource="http://id.loc.gov/authorities/names/sh2002012476"/> + <owl:sameAs rdf:resource="http://id.loc.gov/authorities/names/sh85139020"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/D049673"/> + <owl:sameAs rdf:resource="https://www.freebase.com/m/089_x"/> + <owl:sameAs rdf:resource="https://g.co/kg/m/089_x"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/K01.400.504.968"/> + <owl:sameAs rdf:resource="http://vocab.getty.edu/aat/300404514"/> + <owl:sameAs rdf:resource="http://id.worldcat.org/fast/1159810"/> + <owl:sameAs rdf:resource="http://dbpedia.org/resource/20th_century"/> + </edm:TimeSpan> + <edm:TimeSpan rdf:about="http://data.europeana.eu/timespan/15"> + <skos:prefLabel xml:lang="de">15. 
Jahrhundert</skos:prefLabel> + <skos:prefLabel xml:lang="fi">1400-luku</skos:prefLabel> + <skos:prefLabel xml:lang="ru">XV век</skos:prefLabel> + <skos:prefLabel xml:lang="pt">Século XV</skos:prefLabel> + <skos:prefLabel xml:lang="bg">15 век</skos:prefLabel> + <skos:prefLabel xml:lang="lt">XV amžius</skos:prefLabel> + <skos:prefLabel xml:lang="hr">15. stoljeće</skos:prefLabel> + <skos:prefLabel xml:lang="lv">15. gadsimts</skos:prefLabel> + <skos:prefLabel xml:lang="fr">XVe siècle</skos:prefLabel> + <skos:prefLabel xml:lang="hu">15. század</skos:prefLabel> + <skos:prefLabel xml:lang="sk">15. storočie</skos:prefLabel> + <skos:prefLabel xml:lang="sl">15. stoletje</skos:prefLabel> + <skos:prefLabel xml:lang="ga">15ú haois</skos:prefLabel> + <skos:prefLabel xml:lang="ca">Segle XV</skos:prefLabel> + <skos:prefLabel xml:lang="sv">1400-talet</skos:prefLabel> + <skos:prefLabel xml:lang="el">15ος αιώνας</skos:prefLabel> + <skos:prefLabel xml:lang="en">15th century</skos:prefLabel> + <skos:prefLabel xml:lang="it">XV secolo</skos:prefLabel> + <skos:prefLabel xml:lang="es">Siglo XV</skos:prefLabel> + <skos:prefLabel xml:lang="et">15. sajand</skos:prefLabel> + <skos:prefLabel xml:lang="cs">15. století</skos:prefLabel> + <skos:prefLabel xml:lang="eu">XV. mendea</skos:prefLabel> + <skos:prefLabel xml:lang="pl">XV wiek</skos:prefLabel> + <skos:prefLabel xml:lang="da">15. 
århundrede</skos:prefLabel> + <skos:prefLabel xml:lang="ro">Secolul al XV-lea</skos:prefLabel> + <skos:prefLabel xml:lang="nl">15e eeuw</skos:prefLabel> + <skos:altLabel xml:lang="ru">15 век</skos:altLabel> + <skos:altLabel xml:lang="sv">15:e århundradet</skos:altLabel> + <skos:altLabel xml:lang="sv">15:e seklet</skos:altLabel> + <skos:altLabel xml:lang="sv">1400-tal</skos:altLabel> + <skos:altLabel xml:lang="sv">1400-talet (århundrade)</skos:altLabel> + <skos:altLabel xml:lang="sv">1400-talet (sekel)</skos:altLabel> + <skos:altLabel xml:lang="en">15th-century</skos:altLabel> + <skos:altLabel xml:lang="en">15th-century</skos:altLabel> + <skos:altLabel xml:lang="en">Quatro Cento</skos:altLabel> + <skos:altLabel xml:lang="it">Quattrocento</skos:altLabel> + <skos:altLabel xml:lang="it">15° secolo</skos:altLabel> + <skos:altLabel xml:lang="it">Secolo XV</skos:altLabel> + <skos:altLabel xml:lang="it">XV Secolo</skos:altLabel> + <skos:altLabel xml:lang="it">Quindicesimo secolo</skos:altLabel> + <skos:altLabel xml:lang="it">XV secolo d.C</skos:altLabel> + <skos:altLabel xml:lang="it">Quattrocento</skos:altLabel> + <skos:altLabel xml:lang="fr">15e siècle</skos:altLabel> + <edm:begin>1401-01-01</edm:begin> + <edm:end>1500-01-01</edm:end> + <edm:isNextInSequence rdf:resource="http://data.europeana.eu/timespan/14"/> + <owl:sameAs rdf:resource="http://www.wikidata.org/entity/Q7018"/> + <owl:sameAs rdf:resource="http://id.loc.gov/authorities/names/sh85048142"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/D049668"/> + <owl:sameAs rdf:resource="https://www.freebase.com/m/08b1m"/> + <owl:sameAs rdf:resource="https://g.co/kg/m/08b1m"/> + <owl:sameAs rdf:resource="http://id.nlm.nih.gov/mesh/K01.400.475.500"/> + <owl:sameAs rdf:resource="http://vocab.getty.edu/aat/300404465"/> + <owl:sameAs rdf:resource="http://babelnet.org/rdf/s02814426n"/> + <owl:sameAs rdf:resource="http://dbpedia.org/resource/15th_century"/> + </edm:TimeSpan> + <skos:Concept 
rdf:about="http://data.europeana.eu/concept/2830"> + <skos:broader rdf:resource="http://data.europeana.eu/concept/17"/> + <skos:broader rdf:resource="http://data.europeana.eu/concept/26"/> + <skos:exactMatch rdf:resource="http://www.wikidata.org/entity/Q213924"/> + <skos:exactMatch rdf:resource="https://d-nb.info/gnd/4148186-0"/> + <skos:exactMatch rdf:resource="https://www.freebase.com/m/01qk0"/> + <skos:exactMatch rdf:resource="https://g.co/kg/m/01qk0"/> + <skos:exactMatch rdf:resource="http://vocab.getty.edu/aat/300224200"/> + <skos:exactMatch rdf:resource="http://www.yso.fi/onto/yso/p13680"/> + <skos:exactMatch rdf:resource="http://babelnet.org/rdf/s00020361n"/> + <skos:exactMatch rdf:resource="http://dbpedia.org/resource/Codex"/> + <skos:note xml:lang="de">Manuskript in Buchform (Vorläufer des Heutigen Buches)</skos:note> + <skos:note xml:lang="ru">Одна из исторических форм книги</skos:note> + <skos:note xml:lang="fi">Käsin kirjoitettu kirja</skos:note> + <skos:note xml:lang="bg">Ръкопис</skos:note> + <skos:note xml:lang="en">Book with handwritten content</skos:note> + <skos:note xml:lang="it">Manoscritto in forma di libro</skos:note> + <skos:note xml:lang="fr">Cahier formé de pages manuscrites reliées ensemble, ancêtre du livre moderne</skos:note> + <skos:note xml:lang="hu">Kézzel írott középkori könyv</skos:note> + <skos:note xml:lang="es">Libro compuesto de páginas manuscritas</skos:note> + <skos:note xml:lang="cs">Rukopis ve formě knihy</skos:note> + <skos:note xml:lang="pl">Forma książki w postaci stron złączonych u grzbietu (zwłaszcza w kontekście historycznym jako + manuskrypt) + </skos:note> + <skos:note xml:lang="da">Håndskreven bog</skos:note> + <skos:note xml:lang="ca">Llibre compost de pàgines manuscrites</skos:note> + <skos:prefLabel xml:lang="de">Kodex</skos:prefLabel> + <skos:prefLabel xml:lang="ru">Кодекс</skos:prefLabel> + <skos:prefLabel xml:lang="fi">Koodeksi</skos:prefLabel> + <skos:prefLabel xml:lang="pt">Códice</skos:prefLabel> + 
<skos:prefLabel xml:lang="bg">Кодекс</skos:prefLabel> + <skos:prefLabel xml:lang="lt">Kodeksas</skos:prefLabel> + <skos:prefLabel xml:lang="hr">Kodeks</skos:prefLabel> + <skos:prefLabel xml:lang="lv">Kodekss</skos:prefLabel> + <skos:prefLabel xml:lang="fr">Codex</skos:prefLabel> + <skos:prefLabel xml:lang="hu">Kódex</skos:prefLabel> + <skos:prefLabel xml:lang="sk">Zákonník</skos:prefLabel> + <skos:prefLabel xml:lang="sl">Kodeks</skos:prefLabel> + <skos:prefLabel xml:lang="ga">Coidéacs</skos:prefLabel> + <skos:prefLabel xml:lang="ca">Còdex</skos:prefLabel> + <skos:prefLabel xml:lang="sv">Codex</skos:prefLabel> + <skos:prefLabel xml:lang="el">Κώδικας</skos:prefLabel> + <skos:prefLabel xml:lang="en">Codex</skos:prefLabel> + <skos:prefLabel xml:lang="it">Codice</skos:prefLabel> + <skos:prefLabel xml:lang="es">Códice</skos:prefLabel> + <skos:prefLabel xml:lang="et">Koodeks</skos:prefLabel> + <skos:prefLabel xml:lang="eu">Kodex</skos:prefLabel> + <skos:prefLabel xml:lang="cs">Kodex</skos:prefLabel> + <skos:prefLabel xml:lang="pl">Kodeks</skos:prefLabel> + <skos:prefLabel xml:lang="ro">Codex</skos:prefLabel> + <skos:prefLabel xml:lang="da">Kodeks</skos:prefLabel> + <skos:prefLabel xml:lang="nl">Codex</skos:prefLabel> + </skos:Concept> + <skos:Concept rdf:about="http://data.europeana.eu/concept/81"> + <skos:exactMatch rdf:resource="http://www.wikidata.org/entity/Q12554"/> + <skos:exactMatch rdf:resource="https://d-nb.info/gnd/4129108-6"/> + <skos:exactMatch rdf:resource="http://id.loc.gov/authorities/names/sh85085001"/> + <skos:exactMatch rdf:resource="http://data.bnf.fr/ark:/12148/cb133185191"/> + <skos:exactMatch rdf:resource="http://id.nlm.nih.gov/mesh/D049691"/> + <skos:exactMatch rdf:resource="http://purl.org/bncf/tid/1066"/> + <skos:exactMatch rdf:resource="https://www.freebase.com/m/04rjz"/> + <skos:exactMatch rdf:resource="https://g.co/kg/m/04rjz"/> + <skos:exactMatch rdf:resource="http://id.nlm.nih.gov/mesh/K01.400.500"/> + <skos:exactMatch 
rdf:resource="http://vocab.getty.edu/aat/300020756"/> + <skos:exactMatch rdf:resource="http://iconclass.org/23T25"/> + <skos:exactMatch rdf:resource="http://id.worldcat.org/fast/1020301"/> + <skos:exactMatch rdf:resource="http://www.yso.fi/onto/yso/p2559"/> + <skos:exactMatch rdf:resource="http://babelnet.org/rdf/s00025241n"/> + <skos:exactMatch rdf:resource="http://zbw.eu/stw/descriptor/15696-4"/> + <skos:exactMatch rdf:resource="http://vocabularies.unesco.org/thesaurus/concept8948"/> + <skos:exactMatch rdf:resource="http://dbpedia.org/resource/Middle_Ages"/> + <skos:note xml:lang="de">Historische Epoche</skos:note> + <skos:note xml:lang="sv">Period i Europas historia mellan antiken och renässansen från slutet av 400-talet till 1400-talet + </skos:note> + <skos:note xml:lang="ru">Период истории, следующий за Античностью и предшествующий Новому времени</skos:note> + <skos:note xml:lang="fi">Euroopan historian ajanjakso vuosina 400–1500</skos:note> + <skos:note xml:lang="pt">Período da história entre 476 d.c a 1453</skos:note> + <skos:note xml:lang="el">Ιστορική περίοδος</skos:note> + <skos:note xml:lang="en">Period of European history from the 5th to the late 15th-century</skos:note> + <skos:note xml:lang="lv">Laika posms no 5. līdz 15. 
gadsimtam</skos:note> + <skos:note xml:lang="it">Periodo della storia europea compreso tra il V e il XV secolo</skos:note> + <skos:note xml:lang="fr">Période de l'histoire de l'Europe, du début du Ve siècle à la fin du XVe siècle</skos:note> + <skos:note xml:lang="es">Período histórico de la civilización occidental</skos:note> + <skos:note xml:lang="cs">Dějinná epocha</skos:note> + <skos:note xml:lang="eu">Europako garai historiko</skos:note> + <skos:note xml:lang="sk">Obdobie dejín</skos:note> + <skos:note xml:lang="pl">Epoka w dziejach Europy (VI–XV w.)</skos:note> + <skos:note xml:lang="ro">Perioadă în istoria europeană ce a ținut din anul 476 până în 1453</skos:note> + <skos:note xml:lang="ca">Període històric de la civilització occidental</skos:note> + <skos:note xml:lang="nl">Periode in de Europese geschiedenis</skos:note> + <skos:prefLabel xml:lang="de">Mittelalter</skos:prefLabel> + <skos:prefLabel xml:lang="fi">Keskiaika</skos:prefLabel> + <skos:prefLabel xml:lang="ru">Средние века</skos:prefLabel> + <skos:prefLabel xml:lang="pt">Idade Média</skos:prefLabel> + <skos:prefLabel xml:lang="bg">Средновековие</skos:prefLabel> + <skos:prefLabel xml:lang="lt">Viduramžiai</skos:prefLabel> + <skos:prefLabel xml:lang="hr">Srednji vijek</skos:prefLabel> + <skos:prefLabel xml:lang="lv">Viduslaiki</skos:prefLabel> + <skos:prefLabel xml:lang="fr">Moyen Âge</skos:prefLabel> + <skos:prefLabel xml:lang="hu">Középkor</skos:prefLabel> + <skos:prefLabel xml:lang="sk">Stredovek</skos:prefLabel> + <skos:prefLabel xml:lang="sl">Srednji vek</skos:prefLabel> + <skos:prefLabel xml:lang="ga">An Mheánaois</skos:prefLabel> + <skos:prefLabel xml:lang="ca">Edat mitjana</skos:prefLabel> + <skos:prefLabel xml:lang="sv">Medeltiden</skos:prefLabel> + <skos:prefLabel xml:lang="el">Μεσαίωνας</skos:prefLabel> + <skos:prefLabel xml:lang="mt">Medjuevu</skos:prefLabel> + <skos:prefLabel xml:lang="en">Middle Ages</skos:prefLabel> + <skos:prefLabel xml:lang="it">Medioevo</skos:prefLabel> + 
<skos:prefLabel xml:lang="es">Edad Media</skos:prefLabel> + <skos:prefLabel xml:lang="et">Keskaeg</skos:prefLabel> + <skos:prefLabel xml:lang="cs">Středověk</skos:prefLabel> + <skos:prefLabel xml:lang="eu">Erdi Aroa</skos:prefLabel> + <skos:prefLabel xml:lang="pl">Średniowiecze</skos:prefLabel> + <skos:prefLabel xml:lang="da">Middelalderen</skos:prefLabel> + <skos:prefLabel xml:lang="ro">Evul Mediu</skos:prefLabel> + <skos:prefLabel xml:lang="nl">Middeleeuwen</skos:prefLabel> + </skos:Concept> + <skos:Concept rdf:about="http://data.europeana.eu/concept/2967"> + <skos:exactMatch rdf:resource="http://www.wikidata.org/entity/Q107274057"/> + <skos:exactMatch rdf:resource="https://d-nb.info/gnd/4273696-1"/> + <skos:exactMatch rdf:resource="http://vocab.getty.edu/aat/300411614"/> + <skos:note xml:lang="en">Interaction, communication, artifacts, and customs related to written texts</skos:note> + <skos:note xml:lang="sl">Interakcija, komunikacija, artefakti in običaji, povezani s pisnimi besedili</skos:note> + <skos:note xml:lang="nl">Interactie, communicatie, objecten en gebruiken met betrekking tot schriftelijk vastgelegde + teksten. 
+ </skos:note> + <skos:prefLabel xml:lang="cs">Čtenářská kultura</skos:prefLabel> + <skos:prefLabel xml:lang="de">Lesekultur</skos:prefLabel> + <skos:prefLabel xml:lang="en">Reading culture</skos:prefLabel> + <skos:prefLabel xml:lang="sl">Bralna kultura</skos:prefLabel> + <skos:prefLabel xml:lang="lv">Lasīšanas kultūra</skos:prefLabel> + <skos:prefLabel xml:lang="nl">Leescultuur</skos:prefLabel> + </skos:Concept> + <ore:Aggregation rdf:about="/aggregation/provider/875/object_NLS2__RS_18_______2LYQ776"> + <edm:aggregatedCHO rdf:resource="/875/object_NLS2__RS_18_______2LYQ776"/> + <edm:dataProvider rdf:resource="http://data.europeana.eu/organization/4574"></edm:dataProvider> + <edm:isShownAt + rdf:resource="http://www.manuscriptorium.com/apps/index.php?direct=record&pid=NLS___-NLS2__RS_18_______2LYQ776-sr"/> + <edm:isShownBy + rdf:resource="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/full/full/0/default.jpg"/> + <edm:object + rdf:resource="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/full/!400,400/0/default.jpg"/> + <edm:provider rdf:resource="http://data.europeana.eu/organization/4577"></edm:provider> + <edm:rights rdf:resource="http://creativecommons.org/licenses/by-nc-sa/4.0/"/> + </ore:Aggregation> + <ore:Proxy rdf:about="/proxy/europeana/875/object_NLS2__RS_18_______2LYQ776"> + <dc:date rdf:resource="#1942"></dc:date> + <dc:date rdf:resource="#1426%2F1450"></dc:date> + <dcterms:created rdf:resource="#1942"></dcterms:created> + <dcterms:created rdf:resource="#1426%2F1450"></dcterms:created> + <dcterms:issued rdf:resource="#1942"></dcterms:issued> + <dc:identifier>http://www.manuscriptorium.com/object/NLS2__RS_18_______2LYQ776</dc:identifier> + <dc:language>chu</dc:language> + <dc:format rdf:resource="http://data.europeana.eu/concept/2830"></dc:format> + <dc:subject rdf:resource="http://data.europeana.eu/concept/81"></dc:subject> + <dc:subject 
rdf:resource="http://data.europeana.eu/concept/2967"></dc:subject> + <edm:europeanaProxy>true</edm:europeanaProxy> + <edm:year>1942</edm:year> + <ore:proxyFor rdf:resource="/875/object_NLS2__RS_18_______2LYQ776"/> + <ore:proxyIn rdf:resource="/aggregation/europeana/875/object_NLS2__RS_18_______2LYQ776"/> + <ore:lineage rdf:resource="/proxy/provider/875/object_NLS2__RS_18_______2LYQ776"/> + </ore:Proxy> + <ore:Proxy rdf:about="/proxy/provider/875/object_NLS2__RS_18_______2LYQ776"> + <dc:creator xml:lang="sr">????????? ??????????</dc:creator> + <dc:creator rdf:resource="auth_0002"></dc:creator> + <dc:creator xml:lang="sr">????? ?????</dc:creator> + <dc:creator xml:lang="sr">??????</dc:creator> + <dc:creator xml:lang="sr">?????? II</dc:creator> + <dc:creator xml:lang="sr">????? ??????????</dc:creator> + <dc:date>1426-1450</dc:date> + <dc:date>1942</dc:date> + <dc:description>???? ???????? ???? ????? ????????? ?? ????? ???????? ?????? (???????-??????) ? ?????? ?? ??????? ? ????????? + ??????? ??. ?????????? ??????? ?? ???????? ????? ??????, ???????????? ???????? ??. ???? ? ??. ???????, ????????? ??????? ??. + ???????????? ????????, ?????????? ????????? ? ????????? ??????? ??. ????, ????????? ??????? ??. ??????? ?? ?????????, + ???????? ??. ???????????? ???????? ? ??. ???????????? ?????????? ?? ???????????? ?????? ? ??????? ??. ???????????? ???????? + ?? ???????? ????? ??????. + </dc:description> + <dc:description>????? II ???? ???????? (95 ?.) ???????? ?????????? ??????????? I ???? (158 ?.), ??? ????? ?? ?? ?????? ? + ???????? ???? ???????? ?????????? ????????, ??. ???????? ??????. + </dc:description> + <dc:description>???????? ????????, ????????? ??? ?. ??? ?. 35. ?. 82 ?? ???? ??????. ?????????? ?? ?. 14v, 37v ? 56v. ??????? + ?? 1966. ??????????? ? ???. + </dc:description> + <dc:description>22,5 ? 15,2</dc:description> + <dc:description>22,5 ? 14,5</dc:description> + <dc:description>??????????? ?????; ??????? ??????.</dc:description> + <dc:description>????????? 
????????? ?? ????????? ????????.</dc:description> + <dc:description>?? ?. 1r: ????? ?????????? ?? 1942; ?? ?. 110v, ????? ????? ????; ?? 123r: ???????? ???????? ? ???? ? ???????? + ???????; 156v, ??????? ?????????; ?? ?????????? ?????? ????? ??????, ?? 1755, ????? ? ???? ????? ?????? ?? ???? ???? ????? + ????????? ??????????. + </dc:description> + <dc:description>????, ???? 27,818</dc:description> + <dc:description>????????????, ???????? ? ????????? ??????, ???????? ???????? ?????? ??????.</dc:description> + <dc:format xml:lang="en">codex</dc:format> + <dc:identifier>?? 18</dc:identifier> + <dc:language>cu</dc:language> + <dc:subject rdf:resource="http://vocab.getty.edu/aat/300020756"></dc:subject> + <dc:subject rdf:resource="http://vocab.getty.edu/aat/300411614"></dc:subject> + <dc:subject rdf:resource="http://www.wikidata.org/entity/Q107274053"></dc:subject> + <dc:title>????? ????????? ?? ???????-?????? ? ??????</dc:title> + <dcterms:created>1426-1450</dcterms:created> + <dcterms:created xml:lang="sr">????? ????????? XV ?. ? 1525.</dcterms:created> + <dcterms:created>1942</dcterms:created> + <dcterms:extent>21 x 28,2</dcterms:extent> + <dcterms:isPartOf xml:lang="en">Art of Reading in the Middle Ages: updated item</dcterms:isPartOf> + <dcterms:isReferencedBy>?. ?????????-????????, ?. ???????????-?????, ?. ??????, ???? ????????? ???????? ??????? ?????????? + ??????. ????? ????, ??????? 1986, 30-34. 
+ </dcterms:isReferencedBy> + <dcterms:issued>1942</dcterms:issued> + <dcterms:medium xml:lang="en">chart</dcterms:medium> + <dcterms:spatial xml:lang="sr">??????</dcterms:spatial> + <edm:currentLocation>Belgrade</edm:currentLocation> + <edm:europeanaProxy>false</edm:europeanaProxy> + <ore:proxyFor rdf:resource="/875/object_NLS2__RS_18_______2LYQ776"/> + <ore:proxyIn rdf:resource="/aggregation/provider/875/object_NLS2__RS_18_______2LYQ776"/> + <edm:type>TEXT</edm:type> + </ore:Proxy> + <edm:EuropeanaAggregation rdf:about="/aggregation/europeana/875/object_NLS2__RS_18_______2LYQ776"> + <edm:aggregatedCHO rdf:resource="/875/object_NLS2__RS_18_______2LYQ776"/> + <edm:dataProvider xml:lang="en">Europeana Foundation</edm:dataProvider> + <edm:provider xml:lang="en">Europeana Foundation</edm:provider> + <edm:datasetName>875_e2e_tests_test_source</edm:datasetName> + <edm:country>Europe</edm:country> + <edm:preview + rdf:resource="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/full/!400,400/0/default.jpg"/> + <edm:language>mul</edm:language> + <edm:completeness>7</edm:completeness> + </edm:EuropeanaAggregation> + <foaf:Organization rdf:about="http://data.europeana.eu/organization/4574"> + <skos:prefLabel xml:lang="en">National Library of Serbia</skos:prefLabel> + </foaf:Organization> + <foaf:Organization rdf:about="http://data.europeana.eu/organization/4577"> + <skos:prefLabel xml:lang="en">Manuscriptorium</skos:prefLabel> + </foaf:Organization> + <svcs:Service rdf:about="https://imagines.manuscriptorium.com/loris/NLS___-NLS2__RS_18_______2LYQ776-sr/id_001/"> + <dcterms:conformsTo rdf:resource="http://iiif.io/api/image"></dcterms:conformsTo> + <doap:implements rdf:resource="http://iiif.io/api/image/2/level2.json"/> + </svcs:Service> +</rdf:RDF> \ No newline at end of file diff --git a/metis-indexing/src/test/resources/solr/schema.xml b/metis-indexing/src/test/resources/solr/schema.xml index d6ae3e61e5..ef64a2e64f 100644 --- 
a/metis-indexing/src/test/resources/solr/schema.xml +++ b/metis-indexing/src/test/resources/solr/schema.xml @@ -1,231 +1,268 @@ <?xml version="1.0" encoding="UTF-8"?> <schema name="europeana-simplified" version="1.6"> + <types> + <!-- ASSETS TRAINING FIELDTYPES --> + <fieldType name="int" class="solr.IntPointField" positionIncrementGap="0"/> + <fieldType name="float" class="solr.FloatPointField" positionIncrementGap="0"/> + <fieldType name="long" class="solr.LongPointField" positionIncrementGap="0"/> + <fieldType name="double" class="solr.DoublePointField" positionIncrementGap="0"/> + <fieldType name="string" class="solr.StrField" sortMissingLast="true"/> + <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/> + <fieldType name="date" class="solr.DatePointField" positionIncrementGap="0"/> + <fieldType name="random" class="solr.RandomSortField"/> + <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100"> + <analyzer type="index"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.FlattenGraphFilterFactory"/> + <filter class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + <analyzer type="query"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + </fieldType> + <!-- Text field type with all strings as doc values for sorting and faceting --> + <fieldType name="sortable_text_general" class="solr.SortableTextField" positionIncrementGap="100"> + <analyzer type="index"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.FlattenGraphFilterFactory"/> + <filter 
class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + <analyzer type="query"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + </fieldType> + + <!-- + <fieldType name="proper_name" class="solr.TextField" positionIncrementGap="100"> + <analyzer type="index"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.FlattenGraphFilterFactory"/> + <filter class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + <analyzer type="query"> + <tokenizer class="solr.WhitespaceTokenizerFactory" /> + <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> + <filter class="solr.LowerCaseFilterFactory" /> + <filter class="solr.ASCIIFoldingFilterFactory"/> + </analyzer> + </fieldType> + --> + <!-- ENUMERATIONS --> + <fieldType name="contentTier" class="solr.EnumFieldType" enumsConfig="enumsConfig.xml" enumName="content_tier"/> + <fieldType name="metadataTier" class="solr.EnumFieldType" enumsConfig="enumsConfig.xml" enumName="metadata_tier"/> + <!-- MET4285: geospatial support --> + <fieldType name="coordinates" class="solr.LatLonPointSpatialField"/> + <!-- DATE SUPPORT --> + <fieldType name="daterange" class="solr.DateRangeField" omitNorms="true"/> + </types> + <fields> + <field name="is_fulltext" type="boolean" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="has_thumbnails" type="boolean" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="has_media" type="boolean" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="filter_tags" type="int" indexed="true" stored="true" 
multiValued="true" docValues="true"/> + <field name="facet_tags" type="int" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="has_landingpage" type="boolean" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="text" type="text_general" indexed="true" stored="false" multiValued="true" uninvertible="false"/> + <field name="timestamp" type="date" indexed="true" stored="true" default="NOW" multiValued="false" docValues="true"/> + <field name="europeana_id" type="string" indexed="true" required="true" multiValued="false" stored="true" docValues="true"/> + <field name="europeana_collectionName" type="string" indexed="true" multiValued="true" stored="true" docValues="true"/> + <field name="edm_datasetName" type="string" indexed="true" multiValued="true" stored="true" docValues="true"/> + <field name="title" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="who" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="when" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="what" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="where" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="europeana_completeness" type="int" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="CREATOR" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="CONTRIBUTOR" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="UGC" type="string" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="LANGUAGE" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="TYPE" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + 
<field name="YEAR" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="PROVIDER" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="DATA_PROVIDER" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="COUNTRY" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="RIGHTS" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="COMPLETENESS" type="string" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="edm_previewNoDistribute" type="boolean" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="subject" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <field name="provider_aggregation_edm_dataProvider" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="provider_aggregation_edm_dataProvider.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <!-- EA1660 SG: edm:preview field --> + <field name="europeana_aggregation_edm_preview" type="string" indexed="false" stored="true" multiValued="false" docValues="true"/> + <field name="provider_aggregation_edm_hasView" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_isShownBy" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_isShownAt" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_object" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_provider" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField 
name="provider_aggregation_edm_provider.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_dc_rights" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="provider_aggregation_dc_rights.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_rights" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="provider_aggregation_edm_rights.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_edm_intermediateProvider" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="provider_aggregation_edm_intermediateProvider.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="europeana_aggregation_edm_country" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="europeana_aggregation_edm_country.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="europeana_aggregation_edm_language" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="europeana_aggregation_edm_language.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="edm_webResource" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_dc_rights" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="wr_dc_rights.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_edm_rights" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="wr_edm_rights.*" 
type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_edm_isNextInSequence" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_contributor" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <dynamicField name="proxy_dc_contributor.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_coverage" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_coverage.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_creator" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <dynamicField name="proxy_dc_creator.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_date" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_date.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_description" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <dynamicField name="proxy_dc_description.*" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <field name="proxy_dc_language" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_language.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_subject" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="proxy_dc_subject.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_format" type="sortable_text_general" indexed="true" 
stored="true" multiValued="true"/> + <dynamicField name="proxy_dc_format.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_title" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <dynamicField name="proxy_dc_title.*" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <field name="proxy_dc_type" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_type.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_type_search" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <dynamicField name="proxy_dc_type_search.*" type="text_general" indexed="true" stored="true" multiValued="true" uninvertible="false"/> + <field name="proxy_dc_source" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_source.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_rights" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_rights.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_identifier" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dc_identifier.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_created" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_created.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_issued" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField 
name="proxy_dcterms_issued.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_spatial" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_spatial.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_temporal" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_temporal.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dc_publisher" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="proxy_dc_publisher.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_hasPart" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_hasPart.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_isPartOf" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_isPartOf.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_provenance" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_dcterms_provenance.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_medium" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="proxy_dcterms_medium.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_dcterms_alternative" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField 
name="proxy_dcterms_alternative.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_edm_type" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="edm_UGC" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="edm_agent" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_skos_prefLabel" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="ag_skos_prefLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_skos_altLabel" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="ag_skos_altLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_foaf_name" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="ag_foaf_name.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="edm_timespan" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ts_skos_prefLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="ts_skos_prefLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ts_skos_altLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="ts_skos_altLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="edm_place" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="pl_skos_prefLabel" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="pl_skos_prefLabel.*" 
type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="pl_skos_altLabel" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="pl_skos_altLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="pl_wgs84_pos_lat" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="pl_wgs84_pos_long" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="pl_wgs84_pos_alt" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="skos_concept" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="cc_skos_prefLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="cc_skos_prefLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="cc_skos_altLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="cc_skos_altLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_cc_license" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_odrl_inherited_from" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_cc_license" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_cc_deprecated_on" type="date" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="foaf_organization" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="org_skos_prefLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField 
name="org_skos_prefLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="org_skos_altLabel" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="org_skos_altLabel.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="_version_" type="long" indexed="true" stored="true" multiValued="false"/> + <field name="svcs_service" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="sv_dcterms_conformsTo" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <!-- EA1660 SG: fields for EPF tiers and random search --> + <field name="metadataTier" type="metadataTier" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="contentTier" type="contentTier" indexed="true" stored="true" multiValued="false" docValues="true"/> + <dynamicField name="random_*" type="random" /> + <dynamicField name="timestamp_*" type="date" indexed="true" stored="true" multiValued="false" docValues="true"/> + <!--Fields previously undeclared in schema--> + <field name="wr_svcs_hasservice" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="wr_svcs_hasservice.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_edm_currentLocation" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_edm_currentLocation.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_edm_hasMet" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_edm_hasMet.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_edm_isRelatedTo" type="string" indexed="true" stored="true" 
multiValued="true" docValues="true"/> + <dynamicField name="proxy_edm_isRelatedTo.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="proxy_edm_year" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="proxy_edm_year.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_rdagr2_dateOfBirth" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="ag_rdagr2_dateOfBirth.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_rdagr2_dateOfDeath" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <dynamicField name="ag_rdagr2_dateOfDeath.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_rdagr2_professionOrOccupation" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="ag_rdagr2_professionOrOccupation.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_rdagr2_placeOfBirth" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="ag_rdagr2_placeOfBirth.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="ag_rdagr2_placeOfDeath" type="sortable_text_general" indexed="true" stored="true" multiValued="true"/> + <dynamicField name="ag_rdagr2_placeOfDeath.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_cc_odrl_inherited_from" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="provider_aggregation_cc_deprecated_on" type="date" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="wr_dcterms_isReferencedBy" type="string" indexed="true" stored="true" 
multiValued="true" docValues="true"/> + <dynamicField name="wr_dcterms_isReferencedBy.*" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <!-- EA2563: fields for the URI of edm:dataProvider (dataProvider), and edm:intermediateProvider and/or edm:provider (provider) --> + <field name="dataProvider" type="string" indexed="true" stored="true" multiValued="false" docValues="true"/> + <field name="provider" type="string" indexed="true" stored="true" multiValued="true" docValues="true"/> + <!-- MET4285: geospatial support --> + <field name="currentLocation_wgs" type="coordinates" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="coverageLocation_wgs" type="coordinates" indexed="true" stored="true" multiValued="true" docValues="true"/> + <field name="location_wgs" type="coordinates" indexed="true" stored="true" multiValued="true" docValues="true"/> + <!-- DATE SUPPORT --> + <field name="created_date" type="daterange" indexed="true" stored="true" multiValued="true"/> <!-- docValues not supported --> + <field name="created_date_begin" type="date" indexed="true" stored="true" docValues="true" multiValued="false"/> + <field name="created_date_end" type="date" indexed="true" stored="true" docValues="true" multiValued="false"/> + <field name="issued_date" type="daterange" indexed="true" stored="true" multiValued="true"/> <!-- docValues not supported --> + <field name="issued_date_begin" type="date" indexed="true" stored="true" docValues="true" multiValued="false"/> + <field name="issued_date_end" type="date" indexed="true" stored="true" docValues="true" multiValued="false"/> + <!-- undeclared fields are simply ignored: --> + <dynamicField name="*" type="string" indexed="false" stored="false" multiValued="true"/> + </fields> <uniqueKey>europeana_id</uniqueKey> - <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/> - <fieldType name="contentTier" class="solr.EnumFieldType" 
enumsConfig="enumsConfig.xml" enumName="content_tier"/> - <fieldType name="coordinates" class="solr.LatLonPointSpatialField"/> - <fieldType name="date" class="solr.DatePointField" positionIncrementGap="0"/> - <fieldType name="double" class="solr.DoublePointField" positionIncrementGap="0"/> - <fieldType name="float" class="solr.FloatPointField" positionIncrementGap="0"/> - <fieldType name="int" class="solr.IntPointField" positionIncrementGap="0"/> - <fieldType name="long" class="solr.LongPointField" positionIncrementGap="0"/> - <fieldType name="metadataTier" class="solr.EnumFieldType" enumsConfig="enumsConfig.xml" enumName="metadata_tier"/> - <fieldType name="random" class="solr.RandomSortField"/> - <fieldType name="sortable_text_general" class="solr.SortableTextField" positionIncrementGap="100"> - <analyzer type="index"> - <tokenizer class="solr.WhitespaceTokenizerFactory"/> - <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> - <filter class="solr.FlattenGraphFilterFactory"/> - <filter class="solr.LowerCaseFilterFactory"/> - <filter class="solr.ASCIIFoldingFilterFactory"/> - </analyzer> - <analyzer type="query"> - <tokenizer class="solr.WhitespaceTokenizerFactory"/> - <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> - <filter class="solr.LowerCaseFilterFactory"/> - <filter class="solr.ASCIIFoldingFilterFactory"/> - </analyzer> - </fieldType> - <fieldType name="string" class="solr.StrField" sortMissingLast="true"/> - <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100"> - <analyzer type="index"> - <tokenizer class="solr.WhitespaceTokenizerFactory"/> - <filter class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> - <filter class="solr.FlattenGraphFilterFactory"/> - <filter class="solr.LowerCaseFilterFactory"/> - <filter class="solr.ASCIIFoldingFilterFactory"/> - </analyzer> - <analyzer type="query"> - <tokenizer class="solr.WhitespaceTokenizerFactory"/> - <filter 
class="solr.WordDelimiterGraphFilterFactory" splitOnNumerics="0"/> - <filter class="solr.LowerCaseFilterFactory"/> - <filter class="solr.ASCIIFoldingFilterFactory"/> - </analyzer> - </fieldType> - <field name="COMPLETENESS" type="string" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="CONTRIBUTOR" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="COUNTRY" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="CREATOR" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="DATA_PROVIDER" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="LANGUAGE" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="PROVIDER" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="RIGHTS" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="TYPE" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="UGC" type="string" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="YEAR" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="_version_" type="long" multiValued="false" indexed="true" stored="true"/> - <field name="ag_foaf_name" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="ag_rdagr2_dateOfBirth" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="ag_rdagr2_dateOfDeath" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="ag_rdagr2_placeOfBirth" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="ag_rdagr2_placeOfDeath" type="sortable_text_general" multiValued="true" 
indexed="true" stored="true"/> - <field name="ag_rdagr2_professionOrOccupation" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="ag_skos_altLabel" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="ag_skos_prefLabel" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="cc_skos_altLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="cc_skos_prefLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="contentTier" type="contentTier" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="coverageLocation_wgs" type="coordinates" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="currentLocation_wgs" type="coordinates" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="dataProvider" type="string" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="edm_UGC" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="edm_agent" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="edm_datasetName" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="edm_place" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="edm_previewNoDistribute" type="boolean" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="edm_timespan" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="edm_webResource" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="europeana_aggregation_edm_country" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - 
<field name="europeana_aggregation_edm_language" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="europeana_aggregation_edm_preview" type="string" docValues="true" multiValued="false" indexed="false" stored="true"/> - <field name="europeana_collectionName" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="europeana_completeness" type="int" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="europeana_id" type="string" docValues="true" multiValued="false" indexed="true" required="true" stored="true"/> - <field name="facet_tags" type="int" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="filter_tags" type="int" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="foaf_organization" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="has_landingpage" type="boolean" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="has_media" type="boolean" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="has_thumbnails" type="boolean" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="is_fulltext" type="boolean" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="location_wgs" type="coordinates" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="metadataTier" type="metadataTier" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="org_skos_altLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="org_skos_prefLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="pl_skos_altLabel" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field 
name="pl_skos_prefLabel" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="pl_wgs84_pos_alt" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="pl_wgs84_pos_lat" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="pl_wgs84_pos_long" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_cc_deprecated_on" type="date" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_cc_license" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_dc_rights" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_dataProvider" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_hasView" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_intermediateProvider" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_isShownAt" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_isShownBy" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_object" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_provider" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="provider_aggregation_edm_rights" type="string" docValues="true" multiValued="true" indexed="true" 
stored="true"/> - <field name="provider_aggregation_odrl_inherited_from" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_contributor" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_coverage" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_creator" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_date" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_description" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_format" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_identifier" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_language" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_publisher" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_rights" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_source" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_subject" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_title" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_type" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dc_type_search" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_alternative" type="string" docValues="true" 
multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_created" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_hasPart" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_isPartOf" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_issued" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_medium" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_provenance" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_spatial" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_dcterms_temporal" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_edm_currentLocation" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_edm_hasMet" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_edm_isRelatedTo" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_edm_type" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="proxy_edm_year" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="skos_concept" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="subject" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="sv_dcterms_conformsTo" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="svcs_service" type="string" docValues="true" multiValued="true" 
indexed="true" stored="true"/> - <field name="text" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="false"/> - <field name="timestamp" type="date" default="NOW" docValues="true" multiValued="false" indexed="true" stored="true"/> - <field name="title" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="ts_skos_altLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="ts_skos_prefLabel" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="what" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="when" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="where" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="who" type="sortable_text_general" multiValued="true" indexed="true" stored="true"/> - <field name="wr_cc_deprecated_on" type="date" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_cc_license" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_cc_odrl_inherited_from" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_dc_rights" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_dcterms_isReferencedBy" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_edm_isNextInSequence" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_edm_rights" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <field name="wr_svcs_hasservice" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="provider_aggregation_edm_intermediateProvider.*" 
type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="provider_aggregation_edm_dataProvider.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="europeana_aggregation_edm_language.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="provider_aggregation_edm_provider.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="europeana_aggregation_edm_country.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_rdagr2_professionOrOccupation.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="provider_aggregation_edm_rights.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="provider_aggregation_dc_rights.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_alternative.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_edm_currentLocation.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="wr_dcterms_isReferencedBy.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_provenance.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_temporal.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_isPartOf.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_rdagr2_placeOfBirth.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField 
name="ag_rdagr2_placeOfDeath.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_created.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_spatial.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_hasPart.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_edm_isRelatedTo.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_rdagr2_dateOfBirth.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_rdagr2_dateOfDeath.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_contributor.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_description.*" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_type_search.*" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_issued.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dcterms_medium.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_identifier.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_publisher.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="org_skos_prefLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="wr_svcs_hasservice.*" type="string" docValues="true" 
multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_coverage.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_language.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_skos_prefLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ts_skos_prefLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="pl_skos_prefLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="cc_skos_prefLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="org_skos_altLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_creator.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_subject.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_skos_altLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ts_skos_altLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="pl_skos_altLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="cc_skos_altLabel.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_edm_hasMet.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_format.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_source.*" type="string" docValues="true" multiValued="true" 
indexed="true" stored="true"/> - <dynamicField name="proxy_dc_rights.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_title.*" type="text_general" uninvertible="false" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_edm_year.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="wr_edm_rights.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_date.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="proxy_dc_type.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="wr_dc_rights.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="ag_foaf_name.*" type="string" docValues="true" multiValued="true" indexed="true" stored="true"/> - <dynamicField name="timestamp_*" type="date" docValues="true" multiValued="false" indexed="true" stored="true"/> - <dynamicField name="random_*" type="random"/> - <dynamicField name="*" type="string" multiValued="true" indexed="false" stored="false"/> - <copyField source="dataProvider" dest="foaf_organization"/> - <copyField source="edm_UGC" dest="UGC"/> - <copyField source="europeana_collectionName" dest="edm_datasetName"/> + <!-- deprecated in Solr 6.x and removed in Solr 7.x + <defaultSearchField>text</defaultSearchField> + <solrQueryParser defaultOperator="AND"/> --> <copyField source="europeana_completeness" dest="COMPLETENESS"/> - <copyField source="provider" dest="foaf_organization"/> - <copyField source="provider_aggregation_odrl_inherited_from" dest="RIGHTS"/> - <copyField source="proxy_edm_type" dest="TYPE"/> - <copyField source="proxy_edm_type" dest="text"/> <copyField source="proxy_dc_contributor.*" dest="CONTRIBUTOR"/> <copyField 
source="europeana_aggregation_edm_country.*" dest="COUNTRY"/> <copyField source="europeana_aggregation_edm_country.*" dest="europeana_aggregation_edm_country"/> @@ -235,6 +272,9 @@ <copyField source="europeana_aggregation_edm_language.*" dest="europeana_aggregation_edm_language"/> <copyField source="provider_aggregation_edm_provider.*" dest="PROVIDER"/> <copyField source="provider_aggregation_edm_rights.*" dest="RIGHTS"/> + <copyField source="provider_aggregation_odrl_inherited_from" dest="RIGHTS"/> + <copyField source="proxy_edm_type" dest="TYPE"/> + <copyField source="edm_UGC" dest="UGC"/> <copyField source="proxy_edm_year.*" dest="YEAR"/> <copyField source="ag_foaf_name.*" dest="ag_foaf_name"/> <copyField source="ag_rdagr2_dateOfBirth.*" dest="ag_rdagr2_dateOfBirth"/> @@ -246,6 +286,7 @@ <copyField source="ag_skos_prefLabel.*" dest="ag_skos_prefLabel"/> <copyField source="cc_skos_altLabel.*" dest="cc_skos_altLabel"/> <copyField source="cc_skos_prefLabel.*" dest="cc_skos_prefLabel"/> + <copyField source="europeana_collectionName" dest="edm_datasetName"/> <copyField source="org_skos_altLabel.*" dest="org_skos_altLabel"/> <copyField source="org_skos_prefLabel.*" dest="org_skos_prefLabel"/> <copyField source="pl_skos_altLabel.*" dest="pl_skos_altLabel"/> @@ -311,6 +352,7 @@ <copyField source="proxy_dcterms_spatial.*" dest="text"/> <copyField source="proxy_dcterms_temporal.*" dest="text"/> <copyField source="proxy_edm_currentLocation.*" dest="text"/> + <copyField source="proxy_edm_type" dest="text"/> <copyField source="ag_skos_altLabel.*" dest="text"/> <copyField source="ag_skos_prefLabel.*" dest="text"/> <copyField source="ag_foaf_name.*" dest="text"/> @@ -350,4 +392,10 @@ <copyField source="wr_dcterms_isReferencedBy.*" dest="wr_dcterms_isReferencedBy"/> <copyField source="wr_edm_rights.*" dest="wr_edm_rights"/> <copyField source="wr_svcs_hasservice.*" dest="wr_svcs_hasservice"/> + <!-- Extension of EA2563: copy fields to support organizations in general --> + 
<copyField source="provider" dest="foaf_organization"/> + <copyField source="dataProvider" dest="foaf_organization"/> + <!-- Extension of EA2563: copy fields to support default search --> + <copyField source="provider" dest="text"/> + <copyField source="dataProvider" dest="text"/> </schema> \ No newline at end of file diff --git a/metis-media-service/pom.xml b/metis-media-service/pom.xml index f6f9d2b9e3..88d795af82 100644 --- a/metis-media-service/pom.xml +++ b/metis-media-service/pom.xml @@ -4,7 +4,7 @@ <parent> <artifactId>metis-framework</artifactId> <groupId>eu.europeana.metis</groupId> - <version>12.2</version> + <version>13</version> </parent> <artifactId>metis-media-service</artifactId> @@ -130,5 +130,10 @@ <groupId>org.wiremock</groupId> <artifactId>wiremock-standalone</artifactId> </dependency> + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-junit-jupiter</artifactId> + <scope>test</scope> + </dependency> </dependencies> </project> diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfBindingFactoryProvider.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfBindingFactoryProvider.java deleted file mode 100644 index 6573393dcb..0000000000 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfBindingFactoryProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -package eu.europeana.metis.mediaprocessing; - -import eu.europeana.metis.schema.jibx.RDF; -import org.jibx.runtime.BindingDirectory; -import org.jibx.runtime.IBindingFactory; -import org.jibx.runtime.JiBXException; - -/** - * This class maintains an instance of {@link IBindingFactory} which it can make available upon - * request. - * - * TODO use {@link eu.europeana.metis.schema.convert.RdfConversionUtils} - no org.jibx.runtime.* - * import should remain. 
- */ -final class RdfBindingFactoryProvider { - - private static IBindingFactory rdfBindingFactory; - - /** - * Constructor - this class should not be initialized. - */ - private RdfBindingFactoryProvider() { - } - - /** - * @return A binding factory. - * @throws JiBXException In case the binding factory could not be created. - */ - static IBindingFactory getBindingFactory() throws JiBXException { - synchronized (RdfBindingFactoryProvider.class) { - if (rdfBindingFactory == null) { - rdfBindingFactory = BindingDirectory.getFactory(RDF.class); - } - return rdfBindingFactory; - } - } -} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImpl.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImpl.java index 3b272b805d..be119729db 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImpl.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImpl.java @@ -1,11 +1,19 @@ package eu.europeana.metis.mediaprocessing; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.EDM_HAS_VIEW; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.EDM_IS_SHOWN_AT; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.EDM_IS_SHOWN_BY; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.EDM_OBJECT; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.EDM_WEBRESOURCE; +import static eu.europeana.metis.mediaprocessing.RdfXpathConstants.SVCS_SERVICE; + import eu.europeana.metis.mediaprocessing.exception.RdfDeserializationException; import eu.europeana.metis.mediaprocessing.model.EnrichedRdf; import eu.europeana.metis.mediaprocessing.model.EnrichedRdfImpl; import eu.europeana.metis.mediaprocessing.model.RdfResourceEntry; import eu.europeana.metis.mediaprocessing.model.UrlType; -import eu.europeana.metis.schema.jibx.RDF; +import 
eu.europeana.metis.schema.convert.RdfConversionUtils; +import eu.europeana.metis.schema.convert.SerializationException; import eu.europeana.metis.utils.RdfNamespaceContext; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -15,6 +23,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -27,136 +36,95 @@ import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; -import org.jibx.runtime.IUnmarshallingContext; -import org.jibx.runtime.JiBXException; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** - * This implements RDF deserialization functionality. The code that obtains the individual resources - * does not assume that we can convert the record to an EDM internal format. Link checking must also - * run on EDM external. We therefore use XPath expressions to obtain the required data. - * - * TODO use {@link eu.europeana.metis.schema.convert.RdfConversionUtils} - no org.jibx.runtime.* - * import should remain. + * This implements RDF deserialization functionality. The code that obtains the individual resources does not assume that we can + * convert the record to an EDM internal format. Link checking must also run on EDM external. We therefore use XPath expressions + * to obtain the required data. 
+ * <p> */ class RdfDeserializerImpl implements RdfDeserializer { - private final UnmarshallingContextWrapper unmarshallingContext = new UnmarshallingContextWrapper(); + private static final String OEMBED_NAMESPACE = "https://oembed.com/"; + private static final String XPATH_OEMBED_SERVICES = + SVCS_SERVICE + "[dcterms:conformsTo/@rdf:resource = \"" + OEMBED_NAMESPACE + "\"]"; + private static final String XPATH_OEMBED_WEB_RESOURCES = EDM_WEBRESOURCE + + "[svcs:has_service/@rdf:resource = " + XPATH_OEMBED_SERVICES + "/@rdf:about]"; + private static final String XPATH_IS_OEMBED_RESOURCE_CONDITION = "[. = " + + XPATH_OEMBED_WEB_RESOURCES + "/@rdf:about]"; + private static final String OEMBED_XPATH_CONDITION_IS_SHOWN_BY = + EDM_IS_SHOWN_BY + XPATH_IS_OEMBED_RESOURCE_CONDITION; + private static final String OEMBED_XPATH_CONDITION_HAS_VIEW = + EDM_HAS_VIEW + XPATH_IS_OEMBED_RESOURCE_CONDITION; private final XPathExpressionWrapper getObjectExpression = new XPathExpressionWrapper( - xPath -> xPath.compile("/rdf:RDF/ore:Aggregation/edm:object/@rdf:resource")); + xPath -> xPath.compile(EDM_OBJECT)); private final XPathExpressionWrapper getHasViewExpression = new XPathExpressionWrapper( - xPath -> xPath.compile("/rdf:RDF/ore:Aggregation/edm:hasView/@rdf:resource")); + xPath -> xPath.compile(EDM_HAS_VIEW)); private final XPathExpressionWrapper getIsShownAtExpression = new XPathExpressionWrapper( - xPath -> xPath.compile("/rdf:RDF/ore:Aggregation/edm:isShownAt/@rdf:resource")); + xPath -> xPath.compile(EDM_IS_SHOWN_AT)); private final XPathExpressionWrapper getIsShownByExpression = new XPathExpressionWrapper( - xPath -> xPath.compile("/rdf:RDF/ore:Aggregation/edm:isShownBy/@rdf:resource")); - - private static class XPathExpressionWrapper extends - AbstractThreadSafeWrapper<XPathExpression, RdfDeserializationException> { + xPath -> xPath.compile(EDM_IS_SHOWN_BY)); + private final XPathExpressionWrapper getOEmbedExpression = new XPathExpressionWrapper( + xPath -> 
xPath.compile(OEMBED_XPATH_CONDITION_HAS_VIEW + " | " + OEMBED_XPATH_CONDITION_IS_SHOWN_BY)); - XPathExpressionWrapper( - ThrowingFunction<XPath, XPathExpression, XPathExpressionException> expressionCreator) { - super(() -> { - final XPathFactory factory; - synchronized (XPathFactory.class) { - factory = XPathFactory.newInstance(); - } - final XPath xPath = factory.newXPath(); - xPath.setNamespaceContext(new RdfNamespaceContext()); - try { - return expressionCreator.apply(xPath); - } catch (XPathExpressionException e) { - throw new RdfDeserializationException("Could not initialize xpath expression.", e); - } - }); - } + private final RdfConversionUtils rdfConversionUtils = new RdfConversionUtils(); - NodeList evaluate(Document document) throws RdfDeserializationException { - return process(compiledExpression -> { - try { - return (NodeList) compiledExpression.evaluate(document, XPathConstants.NODESET); - } catch (XPathExpressionException e) { - throw new RdfDeserializationException("Problem with deserializing RDF.", e); - } - }); - } + private static List<RdfResourceEntry> convertToResourceEntries( + Map<String, ResourceInfo> urlWithTypes) { + return urlWithTypes.entrySet().stream().map(RdfDeserializerImpl::convertToResourceEntry) + .toList(); } - private static class UnmarshallingContextWrapper extends - AbstractThreadSafeWrapper<IUnmarshallingContext, RdfDeserializationException> { - - public UnmarshallingContextWrapper() { - super(() -> { - try { - return RdfBindingFactoryProvider.getBindingFactory().createUnmarshallingContext(); - } catch (JiBXException e) { - throw new RdfDeserializationException("Problem creating deserializer.", e); - } - }); - } + private static RdfResourceEntry convertToResourceEntry(Map.Entry<String, ResourceInfo> entry) { + return new RdfResourceEntry(entry.getKey(), entry.getValue().urlTypes(), + entry.getValue().configuredForOembed()); + } - public RDF deserializeToRdf(InputStream inputStream) throws RdfDeserializationException { - 
return process(context -> { - try { - return (RDF) context.unmarshalDocument(inputStream, "UTF-8"); - } catch (JiBXException e) { - throw new RdfDeserializationException("Problem with deserializing record to RDF.", e); - } - }); + private static <R> R performDeserialization(byte[] input, DeserializationOperation<R> operation) + throws RdfDeserializationException { + try (InputStream inputStream = new ByteArrayInputStream(input)) { + return operation.performDeserialization(inputStream); + } catch (IOException e) { + throw new RdfDeserializationException("Problem with reading byte array - Shouldn't happen.", e); } } @Override public RdfResourceEntry getMainThumbnailResourceForMediaExtraction(byte[] input) - throws RdfDeserializationException { + throws RdfDeserializationException { return performDeserialization(input, this::getMainThumbnailResourceForMediaExtraction); } @Override public RdfResourceEntry getMainThumbnailResourceForMediaExtraction(InputStream inputStream) - throws RdfDeserializationException { + throws RdfDeserializationException { return getMainThumbnailResourceForMediaExtraction(deserializeToDocument(inputStream)) - .orElse(null); - } - - private Optional<RdfResourceEntry> getMainThumbnailResourceForMediaExtraction(Document record) - throws RdfDeserializationException { - - // Get the entries of the required types. - final Map<String, Set<UrlType>> resourceEntries = getResourceEntries(record, - Collections.singleton(UrlType.URL_TYPE_FOR_MAIN_THUMBNAIL_RESOURCE)); - - // If there is not exactly one, we return an empty optional. - if (resourceEntries.size() != 1) { - return Optional.empty(); - } - - // So there is exactly one. Convert and return. 
- return Optional.of(convertToResourceEntries(resourceEntries).get(0)); + .orElse(null); } @Override public List<RdfResourceEntry> getRemainingResourcesForMediaExtraction(byte[] input) - throws RdfDeserializationException { + throws RdfDeserializationException { return performDeserialization(input, this::getRemainingResourcesForMediaExtraction); } @Override public List<RdfResourceEntry> getRemainingResourcesForMediaExtraction(InputStream inputStream) - throws RdfDeserializationException { + throws RdfDeserializationException { // Get all the resource entries. - final Document record = deserializeToDocument(inputStream); - final Map<String, Set<UrlType>> allResources = getResourceEntries(record, - UrlType.URL_TYPES_FOR_MEDIA_EXTRACTION); + final Document deserializedDocument = deserializeToDocument(inputStream); + final Map<String, ResourceInfo> allResources = getResourceEntries(deserializedDocument, + UrlType.URL_TYPES_FOR_MEDIA_EXTRACTION); // Find the main thumbnail resource if it exists and remove it from the result. - getMainThumbnailResourceForMediaExtraction(record).map(RdfResourceEntry::getResourceUrl) - .ifPresent(allResources::remove); + getMainThumbnailResourceForMediaExtraction(deserializedDocument).map(RdfResourceEntry::getResourceUrl) + .ifPresent(allResources::remove); // Done. 
return convertToResourceEntries(allResources); @@ -172,54 +140,61 @@ public List<RdfResourceEntry> getResourceEntriesForLinkChecking(byte[] input) public List<RdfResourceEntry> getResourceEntriesForLinkChecking(InputStream inputStream) throws RdfDeserializationException { return convertToResourceEntries(getResourceEntries(deserializeToDocument(inputStream), - UrlType.URL_TYPES_FOR_LINK_CHECKING)); + UrlType.URL_TYPES_FOR_LINK_CHECKING)); } - private static List<RdfResourceEntry> convertToResourceEntries( - Map<String, Set<UrlType>> urlWithTypes) { - return urlWithTypes.entrySet().stream().map(RdfDeserializerImpl::convertToResourceEntry) - .toList(); + @Override + public EnrichedRdf getRdfForResourceEnriching(byte[] input) throws RdfDeserializationException { + return performDeserialization(input, this::getRdfForResourceEnriching); } - private static RdfResourceEntry convertToResourceEntry(Map.Entry<String, Set<UrlType>> entry) { - return new RdfResourceEntry(entry.getKey(), entry.getValue()); + @Override + public EnrichedRdf getRdfForResourceEnriching(InputStream inputStream) + throws RdfDeserializationException { + try { + return new EnrichedRdfImpl(rdfConversionUtils.convertInputStreamToRdf(inputStream)); + } catch (SerializationException e) { + throw new RdfDeserializationException("Problem with deserializing record to RDF.", e); + } } - Map<String, Set<UrlType>> getResourceEntries(Document document, - Set<UrlType> allowedUrlTypes) throws RdfDeserializationException { - final Map<String, Set<UrlType>> urls = new HashMap<>(); - for (UrlType type : allowedUrlTypes) { - final Set<String> urlsForType = getUrls(document, type); - for (String url : urlsForType) { - urls.computeIfAbsent(url, k -> new HashSet<>()).add(type); - } + private Optional<RdfResourceEntry> getMainThumbnailResourceForMediaExtraction(Document document) + throws RdfDeserializationException { + + // Get the entries of the required types. 
+ final Map<String, ResourceInfo> resourceEntries = getResourceEntries(document, + Collections.singleton(UrlType.URL_TYPE_FOR_MAIN_THUMBNAIL_RESOURCE)); + + // If there is not exactly one, we return an empty optional. + if (resourceEntries.size() != 1) { + return Optional.empty(); } - return urls; + + // So there is exactly one. Convert and return. + return Optional.of(convertToResourceEntries(resourceEntries).get(0)); } private Set<String> getUrls(Document document, UrlType type) throws RdfDeserializationException { // Determine the right expression to apply. final XPathExpressionWrapper expression = - switch (type) { - case OBJECT -> getObjectExpression; - case HAS_VIEW -> getHasViewExpression; - case IS_SHOWN_AT -> getIsShownAtExpression; - case IS_SHOWN_BY -> getIsShownByExpression; - }; + switch (type) { + case OBJECT -> getObjectExpression; + case HAS_VIEW -> getHasViewExpression; + case IS_SHOWN_AT -> getIsShownAtExpression; + case IS_SHOWN_BY -> getIsShownByExpression; + }; // Evaluate the expression and convert the node list to a set of attribute values. final NodeList nodes = expression.evaluate(document); return IntStream.range(0, nodes.getLength()).mapToObj(nodes::item).map(Node::getNodeValue) - .collect(Collectors.toSet()); + .collect(Collectors.toSet()); } private Document deserializeToDocument(InputStream inputStream) throws RdfDeserializationException { // Parse document to schema-agnostic XML document (but make parsing namespace-aware). try { - // False positive. The parser has all security settings applied (see below). 
- @SuppressWarnings("squid:S2755") final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); @@ -230,29 +205,101 @@ private Document deserializeToDocument(InputStream inputStream) throws RdfDeseri } } - @Override - public EnrichedRdf getRdfForResourceEnriching(byte[] input) throws RdfDeserializationException { - return performDeserialization(input, this::getRdfForResourceEnriching); + private Set<String> getOEmbedUrls(Document document) throws RdfDeserializationException { + final NodeList oEmbedNodes = getOEmbedExpression.evaluate(document); + return IntStream.range(0, oEmbedNodes.getLength()) + .mapToObj(oEmbedNodes::item) + .map(Node::getNodeValue) + .collect(Collectors.toSet()); } - @Override - public EnrichedRdf getRdfForResourceEnriching(InputStream inputStream) - throws RdfDeserializationException { - return new EnrichedRdfImpl(unmarshallingContext.deserializeToRdf(inputStream)); + @FunctionalInterface + private interface DeserializationOperation<R> { + + /** + * Perform deserialization r. + * + * @param inputStream the input stream + * @return the r + * @throws RdfDeserializationException the rdf deserialization exception + */ + R performDeserialization(InputStream inputStream) throws RdfDeserializationException; } - private static <R> R performDeserialization(byte[] input, DeserializationOperation<R> operation) - throws RdfDeserializationException { - try (InputStream inputStream = new ByteArrayInputStream(input)) { - return operation.performDeserialization(inputStream); - } catch (IOException e) { - throw new RdfDeserializationException("Problem with reading byte array - Shouldn't happen.", e); + private static class XPathExpressionWrapper extends + AbstractThreadSafeWrapper<XPathExpression, RdfDeserializationException> { + + /** + * Instantiates a new X path expression wrapper. 
+ * + * @param expressionCreator the expression creator + */ + XPathExpressionWrapper( + ThrowingFunction<XPath, XPathExpression, XPathExpressionException> expressionCreator) { + super(() -> { + final XPathFactory factory; + synchronized (XPathFactory.class) { + factory = XPathFactory.newInstance(); + } + final XPath xPath = factory.newXPath(); + xPath.setNamespaceContext(new RdfNamespaceContext()); + try { + return expressionCreator.apply(xPath); + } catch (XPathExpressionException e) { + throw new RdfDeserializationException("Could not initialize xpath expression.", e); + } + }); + } + + /** + * Evaluate node list. + * + * @param document the document + * @return the node list + * @throws RdfDeserializationException the rdf deserialization exception + */ + NodeList evaluate(Document document) throws RdfDeserializationException { + return process(compiledExpression -> { + try { + return (NodeList) compiledExpression.evaluate(document, XPathConstants.NODESET); + } catch (XPathExpressionException e) { + throw new RdfDeserializationException("Problem with deserializing RDF.", e); + } + }); } } + /** + * Gets resource entries. + * + * @param document the document + * @param allowedUrlTypes the allowed url types + * @return the resource entries + * @throws RdfDeserializationException the rdf deserialization exception + */ + Map<String, ResourceInfo> getResourceEntries(Document document, + Set<UrlType> allowedUrlTypes) throws RdfDeserializationException { + + // Get the resources and their types. + final Map<String, Set<UrlType>> urls = new HashMap<>(); + for (UrlType type : allowedUrlTypes) { + final Set<String> urlsForType = getUrls(document, type); + for (String url : urlsForType) { + urls.computeIfAbsent(url, k -> new HashSet<>()).add(type); + } + } - @FunctionalInterface - private interface DeserializationOperation<R> { + // For each resource, check whether they are configured for oEmbed. 
+ final Map<String, ResourceInfo> result = HashMap.newHashMap(urls.size()); + final Set<String> oEmbedUrls = getOEmbedUrls(document); + for (Entry<String, Set<UrlType>> entry : urls.entrySet()) { + boolean isConfiguredForOembed = oEmbedUrls.contains(entry.getKey()); + result.put(entry.getKey(), new ResourceInfo(entry.getValue(), isConfiguredForOembed)); + } - R performDeserialization(InputStream inputStream) throws RdfDeserializationException; + // Done + return result; + } + + record ResourceInfo(Set<UrlType> urlTypes, boolean configuredForOembed) { } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializer.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializer.java index e7842ad69b..b2bd40ddff 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializer.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializer.java @@ -2,7 +2,6 @@ import eu.europeana.metis.mediaprocessing.exception.RdfSerializationException; import eu.europeana.metis.mediaprocessing.model.EnrichedRdf; -import java.io.OutputStream; /** * Implementations of this interface provide a variety of serialization options for RDF files. This @@ -22,14 +21,4 @@ public interface RdfSerializer { */ byte[] serialize(EnrichedRdf rdf) throws RdfSerializationException; - /** - * Serialize an RDF into a file. This method should call the {@link EnrichedRdf#finalizeRdf()} - * method before serialization. - * - * @param rdf The RDF to serialize. - * @param outputStream The output stream to which to send the serialized file. - * @throws RdfSerializationException In case there was a problem serializing this RDF. 
- */ - void serialize(EnrichedRdf rdf, OutputStream outputStream) throws RdfSerializationException; - } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializerImpl.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializerImpl.java index 3d465db9bf..10d6c33d53 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializerImpl.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfSerializerImpl.java @@ -1,62 +1,23 @@ package eu.europeana.metis.mediaprocessing; -import eu.europeana.metis.schema.jibx.RDF; import eu.europeana.metis.mediaprocessing.exception.RdfSerializationException; import eu.europeana.metis.mediaprocessing.model.EnrichedRdf; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import org.jibx.runtime.IMarshallingContext; -import org.jibx.runtime.JiBXException; +import eu.europeana.metis.schema.convert.RdfConversionUtils; +import eu.europeana.metis.schema.convert.SerializationException; /** * This object implements RDF serialization functionality. - * - * TODO use {@link eu.europeana.metis.schema.convert.RdfConversionUtils} - no org.jibx.runtime.* - * import should remain. 
*/ class RdfSerializerImpl implements RdfSerializer { - private final MarshallingContextWrapper marshallingContext = new MarshallingContextWrapper(); - - private static class MarshallingContextWrapper extends - AbstractThreadSafeWrapper<IMarshallingContext, RdfSerializationException> { - - MarshallingContextWrapper() { - super(() -> { - try { - return RdfBindingFactoryProvider.getBindingFactory().createMarshallingContext(); - } catch (JiBXException e) { - throw new RdfSerializationException("Problem creating serializer.", e); - } - }); - } - - void serializeFromRdf(RDF rdf, OutputStream outputStream) throws RdfSerializationException { - process(context -> { - try { - context.marshalDocument(rdf, "UTF-8", null, outputStream); - return null; - } catch (JiBXException e) { - throw new RdfSerializationException("Problem with serializing RDF.", e); - } - }); - } - } + private final RdfConversionUtils rdfConversionUtils = new RdfConversionUtils(); @Override public byte[] serialize(EnrichedRdf rdf) throws RdfSerializationException { - try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { - serialize(rdf, outputStream); - return outputStream.toByteArray(); - } catch (IOException e) { + try { + return rdfConversionUtils.convertRdfToBytes(rdf.finalizeRdf()); + } catch (SerializationException e) { throw new RdfSerializationException("Problem with serializing RDF.", e); } } - - @Override - public void serialize(EnrichedRdf rdf, OutputStream outputStream) - throws RdfSerializationException { - marshallingContext.serializeFromRdf(rdf.finalizeRdf(), outputStream); - } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfXpathConstants.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfXpathConstants.java new file mode 100644 index 0000000000..d13d3296df --- /dev/null +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/RdfXpathConstants.java @@ -0,0 +1,19 @@ +package 
eu.europeana.metis.mediaprocessing; + +/** + * Rdf xpath string constants. + */ +public final class RdfXpathConstants { + + public static final String RDF_NAMESPACE = "/rdf:RDF"; + public static final String ORE_AGGREGATION = RDF_NAMESPACE + "/ore:Aggregation"; + public static final String EDM_OBJECT = ORE_AGGREGATION + "/edm:object/@rdf:resource"; + public static final String EDM_IS_SHOWN_BY = ORE_AGGREGATION + "/edm:isShownBy/@rdf:resource"; + public static final String EDM_HAS_VIEW = ORE_AGGREGATION + "/edm:hasView/@rdf:resource"; + public static final String EDM_IS_SHOWN_AT = ORE_AGGREGATION + "/edm:isShownAt/@rdf:resource"; + public static final String SVCS_SERVICE = RDF_NAMESPACE + "/svcs:Service"; + public static final String EDM_WEBRESOURCE = RDF_NAMESPACE + "/edm:WebResource"; + + private RdfXpathConstants() {} + +} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessor.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessor.java index f191ba71f4..b6d82d3c05 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessor.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessor.java @@ -48,10 +48,13 @@ class AudioVideoProcessor implements MediaProcessor { private static final Logger LOGGER = LoggerFactory.getLogger(AudioVideoProcessor.class); + public static final int FFPROBE_MAX_VERSION = 7; + public static final int FFPROBE_MIN_VERSION = 2; private static String globalFfprobeCommand; private final CommandExecutor commandExecutor; + private final String ffprobeCommand; /** @@ -97,7 +100,7 @@ static String discoverFfprobeCommand(CommandExecutor commandExecutor) int indexVersion = output.lastIndexOf("version ") + "version ".length(); int version = Character.isDigit(output.charAt(indexVersion)) ? 
Integer.parseInt(String.valueOf(output.charAt(indexVersion))) : 0; - if (!(version >= 2 && version < 5)) { + if (!(version >= FFPROBE_MIN_VERSION && version < FFPROBE_MAX_VERSION)) { throw new MediaProcessorException("ffprobe version " + version + ".x not found"); } @@ -171,7 +174,7 @@ public ResourceExtractionResultImpl extractMetadata(Resource resource, String de private Representation getRepresentationFromMpd(AdaptationSet videoAdaptationSet) throws MediaExtractionException { // If only one representation available, get that one, otherwise get the first of type video - Representation videoRepresentation = videoAdaptationSet.getRepresentations().get(0); + Representation videoRepresentation = videoAdaptationSet.getRepresentations().getFirst(); if (videoAdaptationSet.getRepresentations().size() > 1) { //Get the one with the highest width*height if possible videoRepresentation = videoAdaptationSet.getRepresentations().stream() diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/Media3dProcessor.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/Media3dProcessor.java index a799ea0831..2367946dd5 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/Media3dProcessor.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/Media3dProcessor.java @@ -29,4 +29,5 @@ public ResourceExtractionResult copyMetadata(Resource resource, String detectedM public boolean downloadResourceForFullProcessing() { return false; } + } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImpl.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImpl.java index 998d67ef30..751f33a6d9 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImpl.java +++ 
b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImpl.java @@ -19,8 +19,10 @@ import java.io.InputStream; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.EnumSet; +import java.util.List; import java.util.Optional; import java.util.Set; import org.apache.tika.io.TikaInputStream; @@ -43,7 +45,8 @@ enum ProcessingMode {FULL, REDUCED, NONE} private static final Set<UrlType> URL_TYPES_FOR_REDUCED_PROCESSING = Collections .singleton(UrlType.IS_SHOWN_AT); - private final ResourceDownloadClient resourceDownloadClient; + private final ResourceDownloadClient resourceDownloadClientOembed; + private final ResourceDownloadClient resourceDownloadClientNonOembed; private final MimeTypeDetectHttpClient mimeTypeDetectHttpClient; private final TikaWrapper tika; @@ -51,6 +54,7 @@ enum ProcessingMode {FULL, REDUCED, NONE} private final AudioVideoProcessor audioVideoProcessor; private final TextProcessor textProcessor; private final Media3dProcessor media3dProcessor; + private final OEmbedProcessor oEmbedProcessor; /** * Constructor meant for testing purposes. @@ -58,20 +62,20 @@ enum ProcessingMode {FULL, REDUCED, NONE} * @param resourceDownloadClient The download client for resources. * @param mimeTypeDetectHttpClient The mime type detector for URLs. * @param tika A tika instance. - * @param imageProcessor An image processor. - * @param audioVideoProcessor An audio/video processor. - * @param textProcessor A text processor. 
+ * @param mediaProcessorList the media processor list */ MediaExtractorImpl(ResourceDownloadClient resourceDownloadClient, - MimeTypeDetectHttpClient mimeTypeDetectHttpClient, TikaWrapper tika, ImageProcessor imageProcessor, - AudioVideoProcessor audioVideoProcessor, TextProcessor textProcessor, Media3dProcessor media3dProcessor) { - this.resourceDownloadClient = resourceDownloadClient; + MimeTypeDetectHttpClient mimeTypeDetectHttpClient, TikaWrapper tika, + List<MediaProcessor> mediaProcessorList) { + this.resourceDownloadClientNonOembed = resourceDownloadClient; + this.resourceDownloadClientOembed = resourceDownloadClient; this.mimeTypeDetectHttpClient = mimeTypeDetectHttpClient; this.tika = tika; - this.imageProcessor = imageProcessor; - this.audioVideoProcessor = audioVideoProcessor; - this.textProcessor = textProcessor; - this.media3dProcessor = media3dProcessor; + this.imageProcessor = (ImageProcessor) getMediaProcessor(mediaProcessorList, ImageProcessor.class); + this.audioVideoProcessor = (AudioVideoProcessor) getMediaProcessor(mediaProcessorList, AudioVideoProcessor.class); + this.textProcessor = (TextProcessor) getMediaProcessor(mediaProcessorList, TextProcessor.class); + this.media3dProcessor = (Media3dProcessor) getMediaProcessor(mediaProcessorList, Media3dProcessor.class); + this.oEmbedProcessor = (OEmbedProcessor) getMediaProcessor(mediaProcessorList, OEmbedProcessor.class); } /** @@ -92,8 +96,12 @@ public MediaExtractorImpl(int redirectCount, int thumbnailGenerateTimeout, throws MediaProcessorException { final ThumbnailGenerator thumbnailGenerator = new ThumbnailGenerator( new CommandExecutor(thumbnailGenerateTimeout)); - this.resourceDownloadClient = new ResourceDownloadClient(redirectCount, - this::shouldDownloadForFullProcessing, connectTimeout, responseTimeout, downloadTimeout); + this.resourceDownloadClientOembed = new ResourceDownloadClient(redirectCount, + type -> this.shouldDownloadForFullProcessing(type, true), + connectTimeout, 
responseTimeout, downloadTimeout); + this.resourceDownloadClientNonOembed = new ResourceDownloadClient(redirectCount, + type -> this.shouldDownloadForFullProcessing(type, false), + connectTimeout, responseTimeout, downloadTimeout); this.mimeTypeDetectHttpClient = new MimeTypeDetectHttpClient(connectTimeout, responseTimeout, downloadTimeout); this.tika = new TikaWrapper(); @@ -102,6 +110,16 @@ public MediaExtractorImpl(int redirectCount, int thumbnailGenerateTimeout, this.textProcessor = new TextProcessor(thumbnailGenerator, new PdfToImageConverter(new CommandExecutor(thumbnailGenerateTimeout))); this.media3dProcessor = new Media3dProcessor(); + this.oEmbedProcessor = new OEmbedProcessor(); + } + + private <T> Object getMediaProcessor(List<?> mediaProcessorList, Class<T> type) { + for (Object mediaProcessor : mediaProcessorList) { + if (type.isInstance(mediaProcessor)) { + return type.cast(mediaProcessor); + } + } + return null; } @Override @@ -115,26 +133,36 @@ public ResourceExtractionResult performMediaExtraction(RdfResourceEntry resource } // Download resource and then perform media extraction on it. - try (Resource resource = downloadBasedOnProcessingMode(resourceEntry, mode)) { - return performProcessing(resource, mode, mainThumbnailAvailable); + try (Resource resource = downloadBasedOnProcessingMode(resourceEntry, mode, + resourceEntry.isResourceConfiguredForOembed())) { + return performProcessing(resource, mode, mainThumbnailAvailable, + resourceEntry.isResourceConfiguredForOembed()); } catch (IOException | RuntimeException e) { throw new MediaExtractionException( String.format("Problem while processing %s", resourceEntry.getResourceUrl()), e); } } + private ResourceDownloadClient getResourceDownloadClient(boolean potentialOembedResource) { + return potentialOembedResource ? 
this.resourceDownloadClientOembed + : this.resourceDownloadClientNonOembed; + } + private Resource downloadBasedOnProcessingMode(RdfResourceEntry resourceEntry, - ProcessingMode mode) throws IOException { + ProcessingMode mode, boolean potentialOembedResource) throws IOException { // Determine the download method to use (full download vs. quick ping) + final ResourceDownloadClient client = getResourceDownloadClient(potentialOembedResource); return (mode == ProcessingMode.FULL) - ? this.resourceDownloadClient.downloadBasedOnMimeType(resourceEntry) - : this.resourceDownloadClient.downloadWithoutContent(resourceEntry); + ? client.downloadBasedOnMimeType(resourceEntry) + : client.downloadWithoutContent(resourceEntry); } ProcessingMode getMode(RdfResourceEntry resourceEntry) { final ProcessingMode result; - if (URL_TYPES_FOR_FULL_PROCESSING.stream().anyMatch(resourceEntry.getUrlTypes()::contains)) { + if (resourceEntry.isResourceConfiguredForOembed()) { + result = ProcessingMode.FULL; + } else if (URL_TYPES_FOR_FULL_PROCESSING.stream().anyMatch(resourceEntry.getUrlTypes()::contains)) { result = ProcessingMode.FULL; } else if (URL_TYPES_FOR_REDUCED_PROCESSING.stream() .anyMatch(resourceEntry.getUrlTypes()::contains)) { @@ -193,26 +221,40 @@ String detectType(Path path, String providedMimeType) throws IOException { } } - MediaProcessor chooseMediaProcessor(MediaType mediaType) { - final MediaProcessor processor; - switch (mediaType) { - case TEXT -> processor = textProcessor; - case AUDIO, VIDEO -> processor = audioVideoProcessor; - case IMAGE -> processor = imageProcessor; - case THREE_D -> processor = media3dProcessor; - default -> processor = null; + List<MediaProcessor> chooseMediaProcessor(MediaType mediaType, String detectedMimeType, + boolean potentialOembedResource) { + return switch (mediaType) { + case TEXT, OTHER -> chooseMediaProcessorTextAndOther(mediaType, detectedMimeType, + potentialOembedResource); + case AUDIO, VIDEO -> List.of(audioVideoProcessor); + 
case IMAGE -> List.of(imageProcessor); + case THREE_D -> List.of(media3dProcessor); + }; + } + + private List<MediaProcessor> chooseMediaProcessorTextAndOther(MediaType mediaType, + String detectedMimeType, boolean potentialOembedResource) { + if (detectedMimeType == null) { + return Collections.emptyList(); + } else if (potentialOembedResource && (detectedMimeType.startsWith("text/xml") + || detectedMimeType.startsWith("application/xml") || detectedMimeType.startsWith("application/json"))) { + return List.of(oEmbedProcessor, textProcessor); + } else if (mediaType == MediaType.TEXT) { + return List.of(textProcessor); + } else { + return Collections.emptyList(); } - return processor; } void verifyAndCorrectContentAvailability(Resource resource, ProcessingMode mode, - String detectedMimeType) throws MediaExtractionException, IOException { + String detectedMimeType, boolean potentialOembedResource) + throws MediaExtractionException, IOException { // If the mime type changed and we need the content after all, we download it. 
- if (mode == ProcessingMode.FULL && shouldDownloadForFullProcessing(detectedMimeType) - && !shouldDownloadForFullProcessing(resource.getProvidedMimeType())) { - final RdfResourceEntry downloadInput = - new RdfResourceEntry(resource.getResourceUrl(), new ArrayList<>(resource.getUrlTypes())); + if (mode == ProcessingMode.FULL && shouldDownloadForFullProcessing(detectedMimeType, potentialOembedResource) + && !shouldDownloadForFullProcessing(resource.getProvidedMimeType(), potentialOembedResource)) { + final RdfResourceEntry downloadInput = new RdfResourceEntry(resource.getResourceUrl(), + new ArrayList<>(resource.getUrlTypes()), potentialOembedResource); ThrowingConsumer<Resource, IOException> action = resourceWithContent -> { if (resourceWithContent.hasContent()) { @@ -221,14 +263,14 @@ void verifyAndCorrectContentAvailability(Resource resource, ProcessingMode mode, } } }; - try (final Resource resourceWithContent = this.resourceDownloadClient + try (final Resource resourceWithContent = getResourceDownloadClient(potentialOembedResource) .downloadWithContent(downloadInput)) { performThrowingAction(resourceWithContent, action); } } // Verify that we have content when we need to. - if (mode == ProcessingMode.FULL && shouldDownloadForFullProcessing(detectedMimeType) + if (mode == ProcessingMode.FULL && shouldDownloadForFullProcessing(detectedMimeType, potentialOembedResource) && !resource.hasContent()) { throw new MediaExtractionException( "File content is not downloaded and mimeType does not support processing without a downloaded file."); @@ -236,7 +278,7 @@ void verifyAndCorrectContentAvailability(Resource resource, ProcessingMode mode, } ResourceExtractionResult performProcessing(Resource resource, ProcessingMode mode, - boolean mainThumbnailAvailable) throws MediaExtractionException { + boolean mainThumbnailAvailable, boolean potentialOembedResource) throws MediaExtractionException { // Sanity check - shouldn't be called for this mode. 
if (mode == ProcessingMode.NONE) { @@ -249,39 +291,53 @@ ResourceExtractionResult performProcessing(Resource resource, ProcessingMode mod // Verify that we have content when we need to. This can happen if the resource doesn't come // with the correct mime type. We correct this here. try { - verifyAndCorrectContentAvailability(resource, mode, detectedMimeType); + verifyAndCorrectContentAvailability(resource, mode, detectedMimeType, potentialOembedResource); } catch (IOException e) { throw new MediaExtractionException("Content availability verification error.", e); } // Choose the right media processor. - final MediaProcessor processor = chooseMediaProcessor(MediaType.getMediaType(detectedMimeType)); + final List<MediaProcessor> processors = chooseMediaProcessor( + MediaType.getMediaType(detectedMimeType), detectedMimeType, potentialOembedResource); + + // Go in order, the first result we get, we accept. + for (MediaProcessor processor: processors) { + final ResourceExtractionResult result = getResourceExtractionResult(resource, mode, + mainThumbnailAvailable, processor, detectedMimeType); + if (result != null) { + return result; + } + } + return null; + } - // Process the resource depending on the mode. + private static ResourceExtractionResult getResourceExtractionResult(Resource resource, + ProcessingMode mode, boolean mainThumbnailAvailable, MediaProcessor processor, + String detectedMimeType) throws MediaExtractionException { final ResourceExtractionResult result; - if (processor == null) { - result = null; - } else if (mode == ProcessingMode.FULL) { + // Process the resource depending on the mode. 
+ if (mode == ProcessingMode.FULL) { result = processor.extractMetadata(resource, detectedMimeType, mainThumbnailAvailable); } else { result = processor.copyMetadata(resource, detectedMimeType); } - - // Done return result; } @Override public void close() throws IOException { - resourceDownloadClient.close(); + resourceDownloadClientOembed.close(); + resourceDownloadClientNonOembed.close(); mimeTypeDetectHttpClient.close(); } /** * @return true if and only if resources of the given type need to be downloaded before performing full processing. */ - boolean shouldDownloadForFullProcessing(String mimeType) { - return Optional.of(MediaType.getMediaType(mimeType)).map(this::chooseMediaProcessor) - .map(MediaProcessor::downloadResourceForFullProcessing).orElse(Boolean.FALSE); + boolean shouldDownloadForFullProcessing(String mimeType, boolean potentialOembedResource) { + return Optional.of(MediaType.getMediaType(mimeType)) + .map(mediaType -> chooseMediaProcessor(mediaType, mimeType, potentialOembedResource)) + .stream().flatMap(Collection::stream) + .anyMatch(MediaProcessor::downloadResourceForFullProcessing); } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaProcessor.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaProcessor.java index 3da0e07b83..f8d3126b96 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaProcessor.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/MediaProcessor.java @@ -15,7 +15,7 @@ interface MediaProcessor { * Process a resource by extracting the metadata from the content. * * @param resource The resource to process. Note that the resource may not have content (see - * {@link MediaExtractorImpl#shouldDownloadForFullProcessing(String)}). + * {@link MediaExtractorImpl#shouldDownloadForFullProcessing(String, boolean)} (String)}). 
* @param detectedMimeType The mime type that was detected for this resource (may deviate from the * mime type that was provided by the server and which is stored in {@link * Resource#getProvidedMimeType()}). diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessor.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessor.java new file mode 100644 index 0000000000..4cbe9fb5e9 --- /dev/null +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessor.java @@ -0,0 +1,137 @@ +package eu.europeana.metis.mediaprocessing.extraction; + +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.checkValidWidthAndHeightDimensions; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.getDurationFromModel; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.getOEmbedModelFromJson; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.getOEmbedModelFromXml; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.hasValidVersion; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.isValidTypePhoto; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.isValidTypeVideo; + +import eu.europeana.metis.mediaprocessing.exception.MediaExtractionException; +import eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedModel; +import eu.europeana.metis.mediaprocessing.model.GenericResourceMetadata; +import eu.europeana.metis.mediaprocessing.model.ImageResourceMetadata; +import eu.europeana.metis.mediaprocessing.model.Resource; +import eu.europeana.metis.mediaprocessing.model.ResourceExtractionResult; +import eu.europeana.metis.mediaprocessing.model.ResourceExtractionResultImpl; +import 
eu.europeana.metis.mediaprocessing.model.VideoResourceMetadata; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Locale; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The type Oembed processor. + */ +public class OEmbedProcessor implements MediaProcessor { + + /** + * The constant LOGGER. + */ + private static final Logger LOGGER = LoggerFactory.getLogger(OEmbedProcessor.class); + + /** + * Process a resource by extracting the metadata from the content. + * + * @param resource The resource to process. Note that the resource may not have content (see + * {@link MediaExtractorImpl#shouldDownloadForFullProcessing(String, boolean)}). + * @param detectedMimeType The mime type that was detected for this resource (may deviate from the mime type that was provided + * by the server and which is stored in {@link Resource#getProvidedMimeType()}). + * @param mainThumbnailAvailable Whether the main thumbnail for this record is available. This may influence the decision on + * whether to generate a thumbnail for this resource. + * @return The result of the processing. + * @throws MediaExtractionException In case something went wrong during the extraction. + */ + @Override + public ResourceExtractionResult extractMetadata(Resource resource, String detectedMimeType, boolean mainThumbnailAvailable) + throws MediaExtractionException { + + final ResourceExtractionResult resourceExtractionResult; + // the content for this oembed needs to be downloaded to be examined + if (resource.getContentPath() != null) { + try { + + // Parse the model. 
+ final OEmbedModel embedModel; + final String oEmbedMimetype; + if (detectedMimeType.startsWith("application/json")) { + embedModel = getOEmbedModelFromJson(Files.readAllBytes(Paths.get(resource.getContentPath().toString()))); + oEmbedMimetype = "application/json+oembed"; + } else if (detectedMimeType.startsWith("application/xml") || detectedMimeType.startsWith("text/xml")) { + embedModel = getOEmbedModelFromXml(Files.readAllBytes(Paths.get(resource.getContentPath().toString()))); + oEmbedMimetype = "application/xml+oembed"; + } else { + embedModel = null; + oEmbedMimetype = null; + } + + // Validate model and compile the extraction result. + if (hasValidVersion(embedModel)) { + resourceExtractionResult = getResourceExtractionResult(resource, oEmbedMimetype, embedModel); + } else { + LOGGER.info("No oembed model found."); + resourceExtractionResult = null; + } + + } catch (IOException e) { + throw new MediaExtractionException("Unable to read OEmbedded resource", e); + } + } else { + resourceExtractionResult = null; + } + + return resourceExtractionResult; + } + + /** + * Process a resource by copying the metadata from the input without performing any extraction. + * + * @param resource The resource to process. The resource is not expected to have content. + * @param detectedMimeType The mime type that was detected for this resource (may deviate from the mime type that was provided + * by the server and which is stored in {@link Resource#getProvidedMimeType()}). + * @return The result of the processing. + * @throws MediaExtractionException In case something went wrong during the extraction. + */ + @Override + public ResourceExtractionResult copyMetadata(Resource resource, String detectedMimeType) throws MediaExtractionException { + return null; + } + + /** + * @return Whether the processor needs the downloaded resource for full processing. 
+ */ + @Override + public boolean downloadResourceForFullProcessing() { + return true; + } + + private ResourceExtractionResult getResourceExtractionResult(Resource resource, String oEmbedMimetype, + OEmbedModel oEmbedModel) throws MediaExtractionException { + ResourceExtractionResult resourceExtractionResult = null; + if (oEmbedModel != null) { + final String oEmbedType = oEmbedModel.getType().toLowerCase(Locale.US); + if ("photo".equals(oEmbedType) && isValidTypePhoto(oEmbedModel)) { + checkValidWidthAndHeightDimensions(oEmbedModel, resource.getResourceUrl()); + ImageResourceMetadata imageResourceMetadata = new ImageResourceMetadata(oEmbedMimetype, + resource.getResourceUrl(), null, + oEmbedModel.getWidth(), oEmbedModel.getHeight(), null, null, null); + resourceExtractionResult = new ResourceExtractionResultImpl(imageResourceMetadata); + } else if ("video".equals(oEmbedType) && isValidTypeVideo(oEmbedModel)) { + checkValidWidthAndHeightDimensions(oEmbedModel, resource.getResourceUrl()); + Double duration = getDurationFromModel(oEmbedModel); + VideoResourceMetadata videoResourceMetadata = new VideoResourceMetadata(oEmbedMimetype, + resource.getResourceUrl(), null, duration, null, + oEmbedModel.getWidth(), oEmbedModel.getHeight(), null, null); + resourceExtractionResult = new ResourceExtractionResultImpl(videoResourceMetadata); + } else { + GenericResourceMetadata genericResourceMetadata = new GenericResourceMetadata(oEmbedMimetype, + resource.getResourceUrl(), null); + resourceExtractionResult = new ResourceExtractionResultImpl(genericResourceMetadata); + } + } + return resourceExtractionResult; + } +} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverter.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverter.java index 2339cab918..de32fb168a 100644 --- 
a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverter.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverter.java @@ -67,8 +67,8 @@ static String discoverGhostScriptCommand(CommandExecutor commandExecutor) final String output; output = commandExecutor.execute(Arrays.asList(command, "--version"), emptyMap(), true, message -> new MediaProcessorException("Error while looking for ghostscript tools: " + message)); - if (!output.startsWith("9.")) { - throw new MediaProcessorException("Ghostscript 9.x not found."); + if (!(output.startsWith("10.") || output.startsWith("9."))) { + throw new MediaProcessorException("Ghostscript 10.x or 9.x not found."); } // So it is installed and available. diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModel.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModel.java new file mode 100644 index 0000000000..d612029aed --- /dev/null +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModel.java @@ -0,0 +1,362 @@ +package eu.europeana.metis.mediaprocessing.extraction.oembed; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Model based on the standard https://oembed.com/ + */ +public class OEmbedModel { + + private String type; + private String version; + private String title; + private int height; + private int width; + private String url; + @JsonProperty("author_name") + private String authorName; + @JsonProperty("author_url") + private String authorUrl; + @JsonProperty("provider_name") + private String providerName; + @JsonProperty("provider_url") + private String providerUrl; + @JsonProperty("cache_age") + private String cacheAge; + @JsonProperty("thumbnail_url") + private String thumbnailUrl; + @JsonProperty("thumbnail_height") + private 
String thumbnailHeight; + @JsonProperty("thumbnail_width") + private String thumbnailWidth; + private String html; + private String duration; + + /** + * Gets type. + * + * @return the type + */ + public String getType() { + return type; + } + + /** + * Sets type. + * + * @param type the type + */ + public void setType(String type) { + this.type = type; + } + + /** + * Gets version. + * + * @return the version + */ + public String getVersion() { + return version; + } + + /** + * Sets version. + * + * @param version the version + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * Gets title. + * + * @return the title + */ + public String getTitle() { + return title; + } + + /** + * Sets title. + * + * @param title the title + */ + public void setTitle(String title) { + this.title = title; + } + + /** + * Gets height. + * + * @return the height + */ + public int getHeight() { + return height; + } + + /** + * Sets height. + * + * @param height the height + */ + public void setHeight(int height) { + this.height = height; + } + + /** + * Gets width. + * + * @return the width + */ + public int getWidth() { + return width; + } + + /** + * Sets width. + * + * @param width the width + */ + public void setWidth(int width) { + this.width = width; + } + + /** + * Gets url. + * + * @return the url + */ + public String getUrl() { + return url; + } + + /** + * Sets url. + * + * @param url the url + */ + public void setUrl(String url) { + this.url = url; + } + + /** + * Gets author name. + * + * @return the author name + */ + public String getAuthorName() { + return authorName; + } + + /** + * Sets author name. + * + * @param authorName the author name + */ + public void setAuthorName(String authorName) { + this.authorName = authorName; + } + + /** + * Gets author url. + * + * @return the author url + */ + public String getAuthorUrl() { + return authorUrl; + } + + /** + * Sets author url. 
+ * + * @param authorUrl the author url + */ + public void setAuthorUrl(String authorUrl) { + this.authorUrl = authorUrl; + } + + /** + * Gets provider name. + * + * @return the provider name + */ + public String getProviderName() { + return providerName; + } + + /** + * Sets provider name. + * + * @param providerName the provider name + */ + public void setProviderName(String providerName) { + this.providerName = providerName; + } + + /** + * Gets provider url. + * + * @return the provider url + */ + public String getProviderUrl() { + return providerUrl; + } + + /** + * Sets provider url. + * + * @param providerUrl the provider url + */ + public void setProviderUrl(String providerUrl) { + this.providerUrl = providerUrl; + } + + /** + * Gets cache age. + * + * @return the cache age + */ + public String getCacheAge() { + return cacheAge; + } + + /** + * Sets cache age. + * + * @param cacheAge the cache age + */ + public void setCacheAge(String cacheAge) { + this.cacheAge = cacheAge; + } + + /** + * Gets thumbnail url. + * + * @return the thumbnail url + */ + public String getThumbnailUrl() { + return thumbnailUrl; + } + + /** + * Sets thumbnail url. + * + * @param thumbnailUrl the thumbnail url + */ + public void setThumbnailUrl(String thumbnailUrl) { + this.thumbnailUrl = thumbnailUrl; + } + + /** + * Gets thumbnail height. + * + * @return the thumbnail height + */ + public String getThumbnailHeight() { + return thumbnailHeight; + } + + /** + * Sets thumbnail height. + * + * @param thumbnailHeight the thumbnail height + */ + public void setThumbnailHeight(String thumbnailHeight) { + this.thumbnailHeight = thumbnailHeight; + } + + /** + * Gets thumbnail width. + * + * @return the thumbnail width + */ + public String getThumbnailWidth() { + return thumbnailWidth; + } + + /** + * Sets thumbnail width. 
+ * + * @param thumbnailWidth the thumbnail width + */ + public void setThumbnailWidth(String thumbnailWidth) { + this.thumbnailWidth = thumbnailWidth; + } + + /** + * Gets html. + * + * @return the html + */ + public String getHtml() { + return html; + } + + /** + * Sets html. + * + * @param html the html + */ + public void setHtml(String html) { + this.html = html; + } + + /** + * Gets duration. + * + * @return the duration + */ + public String getDuration() { + return duration; + } + + /** + * Sets duration. + * + * @param duration the duration + */ + public void setDuration(String duration) { + this.duration = duration; + } + + @Override + public final boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof OEmbedModel that)) { + return false; + } + + return height == that.height && width == that.width && type.equals(that.type) && version.equals(that.version) + && Objects.equals(title, that.title) && url.equals(that.url) && Objects.equals(authorName, + that.authorName) && Objects.equals(authorUrl, that.authorUrl) && Objects.equals(providerName, + that.providerName) && Objects.equals(providerUrl, that.providerUrl) && Objects.equals(cacheAge, + that.cacheAge) && Objects.equals(thumbnailUrl, that.thumbnailUrl) && Objects.equals(thumbnailHeight, + that.thumbnailHeight) && Objects.equals(thumbnailWidth, that.thumbnailWidth) && Objects.equals(html, + that.html) && Objects.equals(duration, that.duration); + } + + @Override + public int hashCode() { + int result = type.hashCode(); + result = 31 * result + version.hashCode(); + result = 31 * result + Objects.hashCode(title); + result = 31 * result + height; + result = 31 * result + width; + result = 31 * result + url.hashCode(); + result = 31 * result + Objects.hashCode(authorName); + result = 31 * result + Objects.hashCode(authorUrl); + result = 31 * result + Objects.hashCode(providerName); + result = 31 * result + Objects.hashCode(providerUrl); + result = 31 * result + 
Objects.hashCode(cacheAge); + result = 31 * result + Objects.hashCode(thumbnailUrl); + result = 31 * result + Objects.hashCode(thumbnailHeight); + result = 31 * result + Objects.hashCode(thumbnailWidth); + result = 31 * result + Objects.hashCode(html); + result = 31 * result + Objects.hashCode(duration); + return result; + } +} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedValidation.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedValidation.java new file mode 100644 index 0000000000..12314b13ee --- /dev/null +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedValidation.java @@ -0,0 +1,361 @@ +package eu.europeana.metis.mediaprocessing.extraction.oembed; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.xml.XmlMapper; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Predicate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.util.UriComponentsBuilder; + +/** + * The type oEmbed validation methods. + */ +public final class OEmbedValidation { + + private static final String MAX_HEIGHT = "maxheight"; + private static final String MAX_WIDTH = "maxwidth"; + private static final String INVALID_URL = "Invalid url"; + private static final String OEMBED_IS_REQUIRED_MESSAGE = "OEmbedModel is required cannot be null"; + private static final String OEMBED_PROPERTY_CHECK_IGNORED = "Property check ignored it doesn't apply"; + private static final Logger LOGGER = LoggerFactory.getLogger(OEmbedValidation.class); + + private OEmbedValidation() { + // validations class + } + + /** + * Gets oembed model from json. 
+ * + * @param jsonResource byte[] + * @return the oembed model from json + * @throws IOException the io exception + */ + public static OEmbedModel getOEmbedModelFromJson(byte[] jsonResource) throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + return objectMapper.readValue(jsonResource, OEmbedModel.class); + } + + /** + * Gets oembed model from xml. + * + * @param xmlResource byte[] + * @return the oembed model from xml + * @throws IOException the io exception + */ + public static OEmbedModel getOEmbedModelFromXml(byte[] xmlResource) throws IOException { + XmlMapper xmlMapper = new XmlMapper(); + xmlMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + return xmlMapper.readValue(xmlResource, OEmbedModel.class); + } + + /** + * Has valid height size url boolean. + * + * @param oEmbedModel the oEmbed model + * @param url the url + * @return the boolean + */ + public static boolean hasValidHeightSizeUrl(OEmbedModel oEmbedModel, String url) { + return hasValidProperty(oEmbedModel, url, + "Not valid height dimension size", + params -> { + if (hasValidMaxHeight(params) && isOEmbedValidHeight(oEmbedModel, params)) { + return true; + } else { + LOGGER.warn("Not valid height according to max height {}", url); + return false; + } + }); + } + + /** + * Has valid height size thumbnail boolean. 
+ * + * @param oEmbedModel the oEmbed model + * @param url the url + * @return the boolean + */ + public static boolean hasValidHeightSizeThumbnail(OEmbedModel oEmbedModel, String url) { + return hasValidProperty(oEmbedModel, url, + "Not valid height thumbnail dimension size", + params -> { + if (hasValidMaxHeight(params) && hasThumbnailUrl(oEmbedModel) + && isOEmbedValidThumbnailHeight(oEmbedModel, params)) { + return true; + } else { + LOGGER.warn("Not valid thumbnail size for max height parameter {}", url); + return false; + } + }); + } + + /** + * Has valid width size url boolean. + * + * @param oEmbedModel the oEmbed model + * @param url the url + * @return the boolean + */ + public static boolean hasValidWidthSizeUrl(OEmbedModel oEmbedModel, String url) { + return hasValidProperty(oEmbedModel, url, + "Not valid width dimension size", + params -> { + if (hasValidMaxWidth(params) && isOEmbedValidWidth(oEmbedModel, params)) { + return true; + } else { + LOGGER.warn("Not valid width according to max width {}", url); + return false; + } + }); + } + + /** + * Has valid width size thumbnail boolean. + * + * @param oEmbedModel the oEmbed model + * @param url the url + * @return the boolean + */ + public static boolean hasValidWidthSizeThumbnail(OEmbedModel oEmbedModel, String url) { + return hasValidProperty(oEmbedModel, url, + "Not valid thumbnail width dimension size", + params -> { + if (hasValidMaxWidth(params) && hasThumbnailUrl(oEmbedModel) + && isOEmbedValidThumbnailWidth(oEmbedModel, params)) { + return true; + } else { + LOGGER.warn("Not valid thumbnail size for max width parameter {}", url); + return false; + } + }); + } + + /** + * Check valid width and height dimensions. 
+ * + * @param oEmbedModel the oEmbed model + * @param url the url + */ + public static void checkValidWidthAndHeightDimensions(OEmbedModel oEmbedModel, String url) { + if (hasValidHeightSizeUrl(oEmbedModel, url)) { + LOGGER.info("Valid url dimensions of height"); + } else { + LOGGER.warn("Not valid url dimensions of height {}", url); + } + if (hasValidWidthSizeUrl(oEmbedModel, url)) { + LOGGER.info("Valid url dimensions of width"); + } else { + LOGGER.warn("Not valid url dimensions of width {}", url); + } + if (hasValidHeightSizeThumbnail(oEmbedModel, url)) { + LOGGER.info("Valid thumbnail dimensions of height"); + } else { + LOGGER.warn("Not valid thumbnail dimensions of height {}", url); + } + if (hasValidWidthSizeThumbnail(oEmbedModel, url)) { + LOGGER.info("Valid thumbnail dimensions of width"); + } else { + LOGGER.warn("Not valid thumbnail dimensions of width {}", url); + } + } + + /** + * Gets duration from model. + * + * @param oEmbedModel the oEmbed model + * @return the duration from model + */ + public static double getDurationFromModel(OEmbedModel oEmbedModel) { + double duration; + try { + duration = Optional.ofNullable(oEmbedModel.getDuration()).map(Double::parseDouble).orElse(0.0); + } catch (NumberFormatException e) { + duration = 0.0; + } + return duration; + } + + /** + * Is valid type photo boolean. + * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + public static boolean isValidTypePhoto(OEmbedModel oEmbedModel) { + return hasValidModelAndType(oEmbedModel) + && "photo".equalsIgnoreCase(oEmbedModel.getType()) + && oEmbedModel.getUrl() != null && !oEmbedModel.getUrl().isEmpty() + && hasValidDimensions(oEmbedModel); + } + + /** + * Is valid type video boolean. 
+ * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + public static boolean isValidTypeVideo(OEmbedModel oEmbedModel) { + return hasValidModelAndType(oEmbedModel) + && "video".equalsIgnoreCase(oEmbedModel.getType()) + && oEmbedModel.getHtml() != null && !oEmbedModel.getHtml().isEmpty() + && hasValidDimensions(oEmbedModel); + } + + /** + * Has valid version boolean. + * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + public static boolean hasValidVersion(OEmbedModel oEmbedModel) { + return oEmbedModel != null && oEmbedModel.getVersion() != null + && oEmbedModel.getVersion().startsWith("1.0"); + } + + /** + * Has valid property boolean. + * + * @param oEmbedModel the o embed model + * @param url the url + * @param messageException the message exception + * @param predicate the predicate + * @return the boolean + */ + private static boolean hasValidProperty(OEmbedModel oEmbedModel, + String url, + String messageException, + Predicate<Map<String, String>> predicate) { + boolean result = false; + Map<String, String> params; + Objects.requireNonNull(oEmbedModel, OEMBED_IS_REQUIRED_MESSAGE); + try { + params = UriComponentsBuilder.fromUri(new URI(url)) + .build() + .getQueryParams() + .toSingleValueMap(); + if (containsMaxHeightAndMaxWidth(params)) { + result = predicate.test(params); + } else { + result = true; + LOGGER.warn(OEMBED_PROPERTY_CHECK_IGNORED); + } + } catch (URISyntaxException e) { + LOGGER.warn(INVALID_URL, e); + } catch (NumberFormatException e) { + LOGGER.warn(messageException, e); + } + return result; + } + + /** + * Is oEmbed valid thumbnail height boolean. 
+ * + * @param oEmbedModel the oEmbed model + * @param params the params + * @return the boolean + */ + private static boolean isOEmbedValidThumbnailHeight(OEmbedModel oEmbedModel, Map<String, String> params) { + return Integer.parseInt(oEmbedModel.getThumbnailHeight()) <= Integer.parseInt(params.get(MAX_HEIGHT)); + } + + /** + * Is oEmbed valid thumbnail width boolean. + * + * @param oEmbedModel the oEmbed model + * @param params the params + * @return the boolean + */ + private static boolean isOEmbedValidThumbnailWidth(OEmbedModel oEmbedModel, Map<String, String> params) { + return Integer.parseInt(oEmbedModel.getThumbnailWidth()) <= Integer.parseInt(params.get(MAX_WIDTH)); + } + + /** + * Is oEmbed valid width boolean. + * + * @param oEmbedModel the oEmbed model + * @param params the params + * @return the boolean + */ + private static boolean isOEmbedValidWidth(OEmbedModel oEmbedModel, Map<String, String> params) { + return oEmbedModel.getWidth() <= Integer.parseInt(params.get(MAX_WIDTH)); + } + + /** + * Is oEmbed valid height boolean. + * + * @param oEmbedModel the oEmbed model + * @param params the params + * @return the boolean + */ + private static boolean isOEmbedValidHeight(OEmbedModel oEmbedModel, Map<String, String> params) { + return oEmbedModel.getHeight() <= Integer.parseInt(params.get(MAX_HEIGHT)); + } + + /** + * Has valid max height boolean. + * + * @param params the params + * @return the boolean + */ + private static boolean hasValidMaxHeight(Map<String, String> params) { + return Integer.parseInt(params.get(MAX_HEIGHT)) > 0; + } + + /** + * Has valid max width boolean. + * + * @param params the params + * @return the boolean + */ + private static boolean hasValidMaxWidth(Map<String, String> params) { + return Integer.parseInt(params.get(MAX_WIDTH)) > 0; + } + + /** + * Check if params contains max height and max width boolean. 
+ * + * @param params the params + * @return the boolean + */ + private static boolean containsMaxHeightAndMaxWidth(Map<String, String> params) { + return params.containsKey(MAX_HEIGHT) || params.containsKey(MAX_WIDTH); + } + + /** + * Has thumbnail url boolean. + * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + private static boolean hasThumbnailUrl(OEmbedModel oEmbedModel) { + return oEmbedModel.getThumbnailUrl() != null; + } + + /** + * Has valid model and type boolean. + * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + private static boolean hasValidModelAndType(OEmbedModel oEmbedModel) { + return oEmbedModel != null && oEmbedModel.getType() != null; + } + + /** + * Has valid dimensions boolean. + * + * @param oEmbedModel the oEmbed model + * @return the boolean + */ + private static boolean hasValidDimensions(OEmbedModel oEmbedModel) { + return (oEmbedModel.getWidth() > 0 && oEmbedModel.getHeight() > 0); + } +} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClient.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClient.java index 98128affea..ce07d223a2 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClient.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClient.java @@ -1,16 +1,17 @@ package eu.europeana.metis.mediaprocessing.http; +import static org.apache.tika.metadata.TikaCoreProperties.RESOURCE_NAME_KEY; + import eu.europeana.metis.mediaprocessing.wrappers.TikaWrapper; import eu.europeana.metis.network.AbstractHttpClient; -import org.apache.commons.lang3.StringUtils; -import org.apache.tika.Tika; -import org.apache.tika.metadata.Metadata; -import org.springframework.http.ContentDisposition; - import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URL; 
+import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; +import org.apache.tika.metadata.Metadata; +import org.springframework.http.ContentDisposition; /** * An {@link AbstractHttpClient} that tries to determine the mime type of a link. It does so based @@ -77,7 +78,7 @@ protected String createResult(URL providedLink, URI actualUri, ContentDispositio final Metadata metadata = new Metadata(); final String resourceName = getResourceNameFromContentDispositionOrFromActualURI(contentDisposition, actualUri); if (resourceName != null) { - metadata.set(Metadata.RESOURCE_NAME_KEY, resourceName); + metadata.set(RESOURCE_NAME_KEY, resourceName); } if (mimeType != null) { final int separatorIndex = mimeType.indexOf(';'); diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/AudioResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/AudioResourceMetadata.java index 8bc706adf7..1ee1e4e34e 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/AudioResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/AudioResourceMetadata.java @@ -1,5 +1,7 @@ package eu.europeana.metis.mediaprocessing.model; +import eu.europeana.metis.schema.jibx.EdmType; + /** * Resource metadata for audio resources. 
*/ @@ -77,6 +79,7 @@ protected void updateResource(WebResource resource) { resource.setSampleRate(sampleRate); resource.setSampleSize(sampleSize); resource.setCodecName(codecName); + resource.setEdmType(EdmType.SOUND); } public Double getDuration() { diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImpl.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImpl.java index 70a49fb508..7f88ab1310 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImpl.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImpl.java @@ -123,7 +123,7 @@ void updateEdmPreview(String url) { !getRdf().getEuropeanaAggregationList().isEmpty()) { final Preview preview = new Preview(); preview.setResource(url); - getRdf().getEuropeanaAggregationList().get(0).setPreview(preview); + getRdf().getEuropeanaAggregationList().getFirst().setPreview(preview); } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/GenericResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/GenericResourceMetadata.java new file mode 100644 index 0000000000..c789c3d893 --- /dev/null +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/GenericResourceMetadata.java @@ -0,0 +1,48 @@ +package eu.europeana.metis.mediaprocessing.model; + +import java.io.Serial; +import java.util.List; + +/** + * Resource metadata instance for when there is no specific resource type known. + */ +public class GenericResourceMetadata extends AbstractResourceMetadata { + + @Serial + private static final long serialVersionUID = 1594698571287313160L; + + /** + * Constructor for the case no thumbnails are available. + * + * @param mimeType The resource mime type. + * @param resourceUrl The resource URL. + * @param contentSize The file content size. 
+ */ + public GenericResourceMetadata(String mimeType, String resourceUrl, Long contentSize) { + this(mimeType, resourceUrl, contentSize, null); + } + + /** + * Constructor. + * + * @param mimeType The resource mime type. + * @param resourceUrl The resource URL. + * @param contentSize The file content size. + * @param thumbnails The thumbnails generated for this text resource. + */ + public GenericResourceMetadata(String mimeType, String resourceUrl, Long contentSize, + List<? extends Thumbnail> thumbnails) { + super(mimeType, resourceUrl, contentSize, thumbnails); + } + + /** + * Constructor. Don't use this: it's required for deserialization. + */ + GenericResourceMetadata() { + } + + @Override + protected ResourceMetadata prepareForSerialization() { + return new ResourceMetadata(this); + } +} diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ImageResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ImageResourceMetadata.java index f0736363b4..887ed55465 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ImageResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ImageResourceMetadata.java @@ -2,6 +2,7 @@ import eu.europeana.metis.mediaprocessing.exception.MediaExtractionException; import eu.europeana.metis.schema.jibx.ColorSpaceType; +import eu.europeana.metis.schema.jibx.EdmType; import eu.europeana.metis.schema.model.Orientation; import java.util.Collections; import java.util.List; @@ -96,6 +97,8 @@ protected void updateResource(WebResource resource) { resource.setOrientation(orientation); resource.setColorspace(colorSpace); resource.setDominantColors(getDominantColors()); + resource.setEdmType(EdmType.IMAGE); + } public Integer getWidth() { diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/Media3dResourceMetadata.java 
b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/Media3dResourceMetadata.java index 40ecc3141e..13fdc3027d 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/Media3dResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/Media3dResourceMetadata.java @@ -1,5 +1,7 @@ package eu.europeana.metis.mediaprocessing.model; +import eu.europeana.metis.schema.jibx.EdmType; + public class Media3dResourceMetadata extends AbstractResourceMetadata{ /** @@ -17,8 +19,15 @@ public class Media3dResourceMetadata extends AbstractResourceMetadata{ public Media3dResourceMetadata(String mimeType, String resourceUrl, Long contentSize) { super(mimeType, resourceUrl, contentSize, null); } + @Override protected ResourceMetadata prepareForSerialization() { return new ResourceMetadata(this); } + + @Override + protected void updateResource(WebResource resource) { + super.updateResource(resource); + resource.setEdmType(EdmType._3_D); + } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/RdfResourceEntry.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/RdfResourceEntry.java index 8033a45b9d..07598dd0a7 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/RdfResourceEntry.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/RdfResourceEntry.java @@ -20,15 +20,21 @@ public class RdfResourceEntry implements Serializable { private String resourceUrl; private Set<UrlType> urlTypes; + private boolean resourceConfiguredForOembed; + /** * Constructor. * * @param resourceUrl The URL of the resource. * @param urlTypes The resource URL types with which this resource is referenced. + * @param resourceConfiguredForOembed If the resource is configured in the record as if it were + * an oEmbed resource. 
*/ - public RdfResourceEntry(String resourceUrl, Collection<UrlType> urlTypes) { + public RdfResourceEntry(String resourceUrl, Collection<UrlType> urlTypes, + boolean resourceConfiguredForOembed) { this.resourceUrl = resourceUrl; this.urlTypes = new HashSet<>(urlTypes); + this.resourceConfiguredForOembed = resourceConfiguredForOembed; } /** @@ -45,8 +51,13 @@ public Set<UrlType> getUrlTypes() { return Collections.unmodifiableSet(urlTypes); } + public boolean isResourceConfiguredForOembed() { + return resourceConfiguredForOembed; + } + @Override public String toString() { - return String.format("%s{resourceUrl=%s, urlTypes=%s}", RdfResourceEntry.class.getSimpleName(), resourceUrl, urlTypes); + return String.format("%s{resourceUrl=%s, urlTypes=%s, oembed=%s}", + RdfResourceEntry.class.getSimpleName(), resourceUrl, urlTypes, resourceConfiguredForOembed); } } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ResourceMetadata.java index e1a029bee7..484814be01 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/ResourceMetadata.java @@ -1,5 +1,6 @@ package eu.europeana.metis.mediaprocessing.model; +import java.io.Serial; import java.util.Set; /** @@ -11,13 +12,14 @@ public class ResourceMetadata implements IResourceMetadata { /** * Implements {@link java.io.Serializable}. 
*/ - private static final long serialVersionUID = 1648797505550562988L; + @Serial private static final long serialVersionUID = 1648797505550562988L; private AudioResourceMetadata audioResourceMetadata; private ImageResourceMetadata imageResourceMetadata; private TextResourceMetadata textResourceMetadata; private VideoResourceMetadata videoResourceMetadata; private Media3dResourceMetadata threeDResourceMetadata; + private GenericResourceMetadata genericResourceMetadata; /** * Constructor for audio resources. @@ -79,6 +81,18 @@ public ResourceMetadata(Media3dResourceMetadata threeDResourceMetadata) { this.threeDResourceMetadata = threeDResourceMetadata; } + /** + * Constructor for generic resources. + * + * @param genericResourceMetadata The resource metadata. + */ + public ResourceMetadata(GenericResourceMetadata genericResourceMetadata) { + if (genericResourceMetadata == null) { + throw new IllegalArgumentException(); + } + this.genericResourceMetadata = genericResourceMetadata; + } + /** * Constructor. Don't use this: it's required for deserialization. 
*/ @@ -97,6 +111,8 @@ AbstractResourceMetadata getMetaData() { result = videoResourceMetadata; } else if (threeDResourceMetadata != null){ result = threeDResourceMetadata; + } else if (genericResourceMetadata != null){ + result = genericResourceMetadata; } else { throw new IllegalStateException(); } diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/TextResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/TextResourceMetadata.java index 12b8d46c00..95233d2ae2 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/TextResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/TextResourceMetadata.java @@ -1,5 +1,6 @@ package eu.europeana.metis.mediaprocessing.model; +import eu.europeana.metis.schema.jibx.EdmType; import java.util.List; /** @@ -60,6 +61,7 @@ protected void updateResource(WebResource resource) { super.updateResource(resource); resource.setContainsText(containsText); resource.setResolution(resolution); + resource.setEdmType(EdmType.TEXT); } /** diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/VideoResourceMetadata.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/VideoResourceMetadata.java index 3cf8fbfdf6..395fa548b1 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/VideoResourceMetadata.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/VideoResourceMetadata.java @@ -1,5 +1,7 @@ package eu.europeana.metis.mediaprocessing.model; +import eu.europeana.metis.schema.jibx.EdmType; + /** * Resource metadata for video resources. 
*/ @@ -78,6 +80,7 @@ protected void updateResource(WebResource resource) { resource.setHeight(height); resource.setCodecName(codecName); resource.setFrameRate(frameRate); + resource.setEdmType(EdmType.VIDEO); } public Double getDuration() { diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/WebResource.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/WebResource.java index 46df71f5c7..bdc172405b 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/WebResource.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/model/WebResource.java @@ -6,6 +6,7 @@ import eu.europeana.metis.schema.jibx.ColorSpaceType; import eu.europeana.metis.schema.jibx.DoubleType; import eu.europeana.metis.schema.jibx.Duration; +import eu.europeana.metis.schema.jibx.EdmType; import eu.europeana.metis.schema.jibx.HasColorSpace; import eu.europeana.metis.schema.jibx.HasMimeType; import eu.europeana.metis.schema.jibx.Height; @@ -19,11 +20,13 @@ import eu.europeana.metis.schema.jibx.SpatialResolution; import eu.europeana.metis.schema.jibx.StringType; import eu.europeana.metis.schema.jibx.Type1; +import eu.europeana.metis.schema.jibx.Type2; import eu.europeana.metis.schema.jibx.WebResourceType; import eu.europeana.metis.schema.jibx.Width; import eu.europeana.metis.schema.model.Orientation; import java.math.BigInteger; import java.util.List; +import java.util.Optional; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -146,6 +149,14 @@ void setResolution(Integer resolution) { resource.setSpatialResolution(uintVal(SpatialResolution::new, resolution)); } + void setEdmType(EdmType edmType) { + resource.setType1(Optional.ofNullable(edmType).map(type -> { + final Type2 type2 = new Type2(); + type2.setType(edmType); + return type2; + }).orElse(null)); + } + private static <T extends IntegerType> T intVal(Supplier<T> constructor, Integer value) { if 
(value == null) { return null; diff --git a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/wrappers/TikaWrapper.java b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/wrappers/TikaWrapper.java index a297613a1f..0e60f9b10b 100644 --- a/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/wrappers/TikaWrapper.java +++ b/metis-media-service/src/main/java/eu/europeana/metis/mediaprocessing/wrappers/TikaWrapper.java @@ -21,6 +21,7 @@ public TikaWrapper() { /** * It uses tika's own detect method + * * @param inputStream The input stream to detect from * @param metadata The metadata associated with the input stream * @return The mime type detected from the input stream @@ -28,12 +29,16 @@ public TikaWrapper() { */ public String detect(InputStream inputStream, Metadata metadata) throws IOException { + // Do the detection. Create a markable input stream for tika to use, so that the marking it does + // will not interfere with our mark above. String detectedMimeType = tika.detect(inputStream, metadata); - if(detectedMimeType.equals("application/vnd.ms-pki.stl")){ + // Normalize STL files (a 3D format). 
+ if (detectedMimeType.equals("application/vnd.ms-pki.stl")) { return "model/x.stl-binary"; } + // Done return detectedMimeType; } } diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImplTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImplTest.java index 749d810048..3a96b4e5ea 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImplTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/RdfDeserializerImplTest.java @@ -3,9 +3,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import eu.europeana.metis.mediaprocessing.RdfDeserializerImpl.ResourceInfo; import eu.europeana.metis.mediaprocessing.exception.RdfDeserializationException; +import eu.europeana.metis.mediaprocessing.model.RdfResourceEntry; import eu.europeana.metis.mediaprocessing.model.UrlType; +import java.io.InputStream; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.parsers.DocumentBuilderFactory; @@ -19,9 +23,32 @@ class RdfDeserializerImplTest { private static final String RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; private static final String ORE_NAMESPACE = "http://www.openarchives.org/ore/terms/"; private static final String EDM_NAMESPACE = "http://www.europeana.eu/schemas/edm/"; + private static final String SVCS_NAMESPACE = "http://rdfs.org/sioc/services#"; + private static final String DCTERMS_NAMESPACE = "http://purl.org/dc/terms/"; + + private static String addEdmOEmbedResourceType(Document document, Element aggregation, String typeName, String resourceValue) { + final Element object = document.createElementNS(EDM_NAMESPACE, typeName); + object.setAttributeNS(RDF_NAMESPACE, "resource", resourceValue); + aggregation.appendChild(object); + final Element webResource = 
document.createElementNS(EDM_NAMESPACE, "WebResource"); + webResource.setAttributeNS(RDF_NAMESPACE, "about", resourceValue); + final Element hasService = document.createElementNS(SVCS_NAMESPACE, "has_service"); + final String oEmbedResourceService = "http://resource/services/oembed/"; + hasService.setAttributeNS(RDF_NAMESPACE, "resource", oEmbedResourceService); + webResource.appendChild(hasService); + object.setAttributeNS(RDF_NAMESPACE, "resource", resourceValue); + final Element service = document.createElementNS(SVCS_NAMESPACE, "Service"); + service.setAttributeNS(RDF_NAMESPACE, "about", oEmbedResourceService); + final Element conformsTo = document.createElementNS(DCTERMS_NAMESPACE, "conformsTo"); + conformsTo.setAttributeNS(RDF_NAMESPACE,"resource", "https://oembed.com/"); + service.appendChild(conformsTo); + object.getParentNode().getParentNode().appendChild(webResource); + object.getParentNode().getParentNode().appendChild(service); + return resourceValue; + } private static String addEdmResourceType(Document document, Element aggregation, String typeName, - String resourceValue) { + String resourceValue) { final Element object = document.createElementNS(EDM_NAMESPACE, typeName); object.setAttributeNS(RDF_NAMESPACE, "resource", resourceValue); aggregation.appendChild(object); @@ -33,27 +60,28 @@ private static String addEdmObject(Document document, Element aggregation, Strin } private static String addEdmHasView(Document document, Element aggregation, - String resourceValue) { + String resourceValue) { return addEdmResourceType(document, aggregation, "hasView", resourceValue); } private static String addEdmIsShownBy(Document document, Element aggregation, - String resourceValue) { + String resourceValue) { return addEdmResourceType(document, aggregation, "isShownBy", resourceValue); } private static String addEdmIsShownAt(Document document, Element aggregation, - String resourceValue) { + String resourceValue) { return addEdmResourceType(document, 
aggregation, "isShownAt", resourceValue); } @Test void testGetResourceUrlsWithDifferentResources() - throws RdfDeserializationException, ParserConfigurationException { + throws RdfDeserializationException, ParserConfigurationException { // Create document with root rdf - final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder() - .newDocument(); + final Document document = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .newDocument(); final Element rdf = document.createElementNS(RDF_NAMESPACE, "RDF"); document.appendChild(rdf); @@ -72,37 +100,38 @@ void testGetResourceUrlsWithDifferentResources() final String isShownAt = addEdmIsShownAt(document, aggregation2, "is shown at resource"); // Test method for all url types - final Map<String, Set<UrlType>> resultAllTypes = new RdfDeserializerImpl() - .getResourceEntries(document, Set.of(UrlType.values())); + final Map<String, ResourceInfo> resultAllTypes = new RdfDeserializerImpl() + .getResourceEntries(document, Set.of(UrlType.values())); assertEquals(6, resultAllTypes.size()); - assertEquals(Collections.singleton(UrlType.OBJECT), resultAllTypes.get(object)); - assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultAllTypes.get(hasView1)); - assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultAllTypes.get(hasView2)); - assertEquals(Collections.singleton(UrlType.IS_SHOWN_BY), resultAllTypes.get(isShownBy1)); - assertEquals(Collections.singleton(UrlType.IS_SHOWN_BY), resultAllTypes.get(isShownBy2)); - assertEquals(Collections.singleton(UrlType.IS_SHOWN_AT), resultAllTypes.get(isShownAt)); + assertEquals(Collections.singleton(UrlType.OBJECT), resultAllTypes.get(object).urlTypes()); + assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultAllTypes.get(hasView1).urlTypes()); + assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultAllTypes.get(hasView2).urlTypes()); + assertEquals(Collections.singleton(UrlType.IS_SHOWN_BY), 
resultAllTypes.get(isShownBy1).urlTypes()); + assertEquals(Collections.singleton(UrlType.IS_SHOWN_BY), resultAllTypes.get(isShownBy2).urlTypes()); + assertEquals(Collections.singleton(UrlType.IS_SHOWN_AT), resultAllTypes.get(isShownAt).urlTypes()); // Test method for selection of url types - final Map<String, Set<UrlType>> resultSelectedTypes = new RdfDeserializerImpl() - .getResourceEntries(document, Set.of(UrlType.IS_SHOWN_AT, UrlType.HAS_VIEW)); + final Map<String, ResourceInfo> resultSelectedTypes = new RdfDeserializerImpl() + .getResourceEntries(document, Set.of(UrlType.IS_SHOWN_AT, UrlType.HAS_VIEW)); assertEquals(3, resultSelectedTypes.size()); - assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultSelectedTypes.get(hasView1)); - assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultSelectedTypes.get(hasView2)); - assertEquals(Collections.singleton(UrlType.IS_SHOWN_AT), resultSelectedTypes.get(isShownAt)); + assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultSelectedTypes.get(hasView1).urlTypes()); + assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultSelectedTypes.get(hasView2).urlTypes()); + assertEquals(Collections.singleton(UrlType.IS_SHOWN_AT), resultSelectedTypes.get(isShownAt).urlTypes()); // Test method for no url types assertTrue( - new RdfDeserializerImpl().getResourceEntries(document, Collections.emptySet()) - .isEmpty()); + new RdfDeserializerImpl().getResourceEntries(document, Collections.emptySet()) + .isEmpty()); } @Test void testGetResourceUrlsWithSameResources() - throws RdfDeserializationException, ParserConfigurationException { + throws RdfDeserializationException, ParserConfigurationException { // Create document with root rdf - final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder() - .newDocument(); + final Document document = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .newDocument(); final Element rdf = document.createElementNS(RDF_NAMESPACE, "RDF"); 
document.appendChild(rdf); final String commonResource = "common resource"; @@ -120,27 +149,102 @@ void testGetResourceUrlsWithSameResources() addEdmIsShownAt(document, aggregation2, commonResource); // Test method for all url types - final Map<String, Set<UrlType>> resultAllTypes = new RdfDeserializerImpl() - .getResourceEntries(document, Set.of(UrlType.values())); + final Map<String, ResourceInfo> resultAllTypes = new RdfDeserializerImpl() + .getResourceEntries(document, Set.of(UrlType.values())); assertEquals(1, resultAllTypes.size()); - assertEquals(Set.of(UrlType.values()), resultAllTypes.get(commonResource)); + assertEquals(Set.of(UrlType.values()), resultAllTypes.get(commonResource).urlTypes()); // Test method for selected url types final Set<UrlType> selectedTypes = Set.of(UrlType.IS_SHOWN_BY, UrlType.OBJECT); - final Map<String, Set<UrlType>> resultSelectedTypes = new RdfDeserializerImpl() - .getResourceEntries(document, selectedTypes); + final Map<String, ResourceInfo> resultSelectedTypes = new RdfDeserializerImpl() + .getResourceEntries(document, selectedTypes); assertEquals(1, resultSelectedTypes.size()); - assertEquals(selectedTypes, resultSelectedTypes.get(commonResource)); + assertEquals(selectedTypes, resultSelectedTypes.get(commonResource).urlTypes()); } @Test void testGetResourceUrlsWithoutData() - throws RdfDeserializationException, ParserConfigurationException { + throws RdfDeserializationException, ParserConfigurationException { final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder() - .newDocument(); + .newDocument(); final Element rdf = document.createElementNS(RDF_NAMESPACE, "RDF"); document.appendChild(rdf); assertTrue(new RdfDeserializerImpl().getResourceEntries(document, Collections.emptySet()) - .isEmpty()); + .isEmpty()); + } + + @Test + void testGetResourceUrlsFromOEmbedCondition() + throws RdfDeserializationException, ParserConfigurationException { + + // given Create document with root rdf + final Document 
document = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .newDocument(); + final Element rdf = document.createElementNS(RDF_NAMESPACE, "RDF"); + document.appendChild(rdf); + final Element aggregation1 = document.createElementNS(ORE_NAMESPACE, "Aggregation"); + rdf.appendChild(aggregation1); + final String hasView = addEdmOEmbedResourceType(document, aggregation1, "hasView", "has view resource"); + final String isShownBy = addEdmOEmbedResourceType(document, aggregation1, "isShownBy", "is shown by resource"); + + // when test object extraction + final Map<String, ResourceInfo> resultAllTypes = new RdfDeserializerImpl() + .getResourceEntries(document, Set.of(UrlType.values())); + + // then check the oEmbedResources where successfully identified. + assertEquals(2, resultAllTypes.size()); + assertEquals(Collections.singleton(UrlType.HAS_VIEW), resultAllTypes.get(hasView).urlTypes()); + assertTrue( resultAllTypes.get(hasView).configuredForOembed()); + assertEquals(Collections.singleton(UrlType.IS_SHOWN_BY), resultAllTypes.get(isShownBy).urlTypes()); + assertTrue( resultAllTypes.get(isShownBy).configuredForOembed()); + } + + @Test + void testGetOEmbeddableObjectsFromSample_MatchingService() throws RdfDeserializationException { + // given + final InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/rdf_with_oembed_sample.xml"); + + // when + final List<RdfResourceEntry> rdfResourceEntry = new RdfDeserializerImpl().getRemainingResourcesForMediaExtraction( + inputStream); + + // then + assertEquals(2, rdfResourceEntry.size()); + assertTrue(rdfResourceEntry + .stream() + .anyMatch( + r -> r.getResourceUrl().equals( + "https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fcdn.pixabay.com%2Fvideo%2F2023%2F10%2F22%2F186070-876973719_small.mp4") + && r.isResourceConfiguredForOembed() + ) + && rdfResourceEntry + .stream() + .anyMatch( + r -> r.getResourceUrl().equals( + 
"http://www.flickr.com/services/oembed/?url=https%3A%2F%2Fwww.flickr.com%2Fphotos%2Fbees%2F2341623661%2F&format=json") + && r.isResourceConfiguredForOembed() + ) + ); + + } + + @Test + void testGetOEmbeddableObjectsFromSample_NoMatchingService() throws RdfDeserializationException { + // given + final InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/rdf_with_oembed_sample_II.xml"); + + // when + final List<RdfResourceEntry> rdfResourceEntry = new RdfDeserializerImpl().getRemainingResourcesForMediaExtraction( + inputStream); + + // then + assertEquals(2, rdfResourceEntry.size()); + assertTrue(rdfResourceEntry.stream().anyMatch(r -> r.getResourceUrl() + .equals("https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F42947250") + && !r.isResourceConfiguredForOembed())); + assertTrue(rdfResourceEntry.stream().anyMatch(r -> r.getResourceUrl() + .equals("http://www.cmcassociates.co.uk/Skara_Brae/landing/sb_pass_pano.html") + && !r.isResourceConfiguredForOembed())); } } diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessorTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessorTest.java index b471fb754e..ae78645801 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessorTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/AudioVideoProcessorTest.java @@ -6,6 +6,7 @@ import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertSame; @@ -89,6 
+90,11 @@ void testDiscoverFfprobeCommand() throws MediaProcessorException { // ffprobe command final String ffprobeCommand = "ffprobe"; + // Test ffprobe 6 + doReturn("ffprobe version 6.1.1-3ubuntu5 Copyright (c) 2007-2023 the FFmpeg developers") + .when(commandExecutor).execute(eq(Collections.singletonList(ffprobeCommand)), anyMap(), eq(true), any()); + assertEquals(ffprobeCommand, AudioVideoProcessor.discoverFfprobeCommand(commandExecutor)); + // Test ffprobe 4 doReturn("ffprobe version 4.4.4-0ubuntu0.18.04.1 Copyright (c) 2007-2018 the FFmpeg developers") .when(commandExecutor).execute(eq(Collections.singletonList(ffprobeCommand)), anyMap(), eq(true), any()); @@ -109,7 +115,7 @@ void testDiscoverFfprobeCommand() throws MediaProcessorException { .when(commandExecutor).execute(eq(Collections.singletonList(ffprobeCommand)), anyMap(), eq(true), any()); assertThrows(MediaProcessorException.class, () -> AudioVideoProcessor.discoverFfprobeCommand(commandExecutor)); - doReturn("ffprobe version 5.4.4-0ubuntu0.18.04.1 Copyright (c) 2007-2018 the FFmpeg developers") + doReturn("ffprobe version 7.4.4-0ubuntu0.18.04.1 Copyright (c) 2007-2018 the FFmpeg developers") .when(commandExecutor).execute(eq(Collections.singletonList(ffprobeCommand)), anyMap(), eq(true), any()); assertThrows(MediaProcessorException.class, () -> AudioVideoProcessor.discoverFfprobeCommand(commandExecutor)); @@ -195,7 +201,6 @@ void testFindValue() { final JSONObject object1 = mock(JSONObject.class); final JSONObject object2 = mock(JSONObject.class); final JSONObject[] objects = new JSONObject[]{object1, object2}; - final String key = "key"; final String value = "test value"; // Test first object only @@ -312,14 +317,14 @@ void testParseCommandResponseForAudio() throws MediaExtractionException, IOExcep final Integer bitsPerSample = 8; final Double duration = 180.062050; final Integer bitRate = 320000; - doReturn(size).when(audioVideoProcessor).findLong(eq("size"), eq(formatAsArray)); - 
doReturn(sampleRate).when(audioVideoProcessor).findInt(eq("sample_rate"), eq(candidates)); - doReturn(channels).when(audioVideoProcessor).findInt(eq("channels"), eq(candidates)); + doReturn(size).when(audioVideoProcessor).findLong("size", formatAsArray); + doReturn(sampleRate).when(audioVideoProcessor).findInt("sample_rate", candidates); + doReturn(channels).when(audioVideoProcessor).findInt("channels", candidates); doReturn(bitsPerSample).when(audioVideoProcessor) - .findInt(eq("bits_per_sample"), eq(candidates)); - doReturn(duration).when(audioVideoProcessor).findDouble(eq("duration"), eq(candidates)); - doReturn(bitRate).when(audioVideoProcessor).findInt(eq("bit_rate"), eq(candidates)); - doReturn("aac").when(audioVideoProcessor).findString(eq("codec_name"), eq(candidates)); + .findInt("bits_per_sample", candidates); + doReturn(duration).when(audioVideoProcessor).findDouble("duration", candidates); + doReturn(bitRate).when(audioVideoProcessor).findInt("bit_rate", candidates); + doReturn("aac").when(audioVideoProcessor).findString("codec_name", candidates); // Run and verify final AbstractResourceMetadata abstractMetadata = audioVideoProcessor @@ -370,14 +375,14 @@ void testParseCommandResponseForVideo() throws MediaExtractionException, IOExcep final Integer bitRate = 595283; final int frameRateNumerator = 629150; final int frameRateDenominator = 25181; - doReturn(size).when(audioVideoProcessor).findLong(eq("size"), eq(formatAsArray)); - doReturn(width).when(audioVideoProcessor).findInt(eq("width"), eq(candidates)); - doReturn(height).when(audioVideoProcessor).findInt(eq("height"), eq(candidates)); - doReturn("h264").when(audioVideoProcessor).findString(eq("codec_name"), eq(candidates)); - doReturn(duration).when(audioVideoProcessor).findDouble(eq("duration"), eq(candidates)); - doReturn(bitRate).when(audioVideoProcessor).findInt(eq("bit_rate"), eq(candidates)); + doReturn(size).when(audioVideoProcessor).findLong("size", formatAsArray); + 
doReturn(width).when(audioVideoProcessor).findInt("width", candidates); + doReturn(height).when(audioVideoProcessor).findInt("height", candidates); + doReturn("h264").when(audioVideoProcessor).findString("codec_name", candidates); + doReturn(duration).when(audioVideoProcessor).findDouble("duration", candidates); + doReturn(bitRate).when(audioVideoProcessor).findInt("bit_rate", candidates); doReturn(frameRateNumerator + "/" + frameRateDenominator).when(audioVideoProcessor) - .findString(eq("avg_frame_rate"), eq(candidates)); + .findString("avg_frame_rate", candidates); // Run and verify final AbstractResourceMetadata abstractMetadata = audioVideoProcessor @@ -397,11 +402,11 @@ void testParseCommandResponseForVideo() throws MediaExtractionException, IOExcep assertEquals(width, metadata.getWidth()); // Try various options for the frame rate - doReturn("0/0").when(audioVideoProcessor).findString(eq("avg_frame_rate"), eq(candidates)); + doReturn("0/0").when(audioVideoProcessor).findString("avg_frame_rate", candidates); final AbstractResourceMetadata metadataWith0FrameRate = audioVideoProcessor .parseCommandResponse(resource, detectedMimeType, commandResponse); assertEquals(Double.valueOf(0.0), ((VideoResourceMetadata) metadataWith0FrameRate).getFrameRate()); - doReturn("1/0").when(audioVideoProcessor).findString(eq("avg_frame_rate"), eq(candidates)); + doReturn("1/0").when(audioVideoProcessor).findString("avg_frame_rate", candidates); final AbstractResourceMetadata metadataWithInvalidFrameRate = audioVideoProcessor .parseCommandResponse(resource, detectedMimeType, commandResponse); assertNull(((VideoResourceMetadata) metadataWithInvalidFrameRate).getFrameRate()); @@ -487,7 +492,7 @@ void testDownloadResourceForFullProcessing() { } @Test - void testCopy() { + void testAudioCopy() { // Create resource final Resource resource = mock(Resource.class); @@ -505,7 +510,7 @@ void testCopy() { // Verify assertNotNull(audioResult); assertNotNull(audioResult.getOriginalMetadata()); 
- assertTrue(audioResult.getOriginalMetadata() instanceof AudioResourceMetadata); + assertInstanceOf(AudioResourceMetadata.class, audioResult.getOriginalMetadata()); assertEquals(detectedAudioMimeType, audioResult.getOriginalMetadata().getMimeType()); assertEquals(fileSize, audioResult.getOriginalMetadata().getContentSize()); assertEquals(url, audioResult.getOriginalMetadata().getResourceUrl()); @@ -517,6 +522,17 @@ void testCopy() { assertNull(((AudioResourceMetadata) audioResult.getOriginalMetadata()).getChannels()); assertNull(((AudioResourceMetadata) audioResult.getOriginalMetadata()).getBitRate()); assertNull(((AudioResourceMetadata) audioResult.getOriginalMetadata()).getCodecName()); + } + + @Test + void testVideoCopy() { + + // Create resource + final Resource resource = mock(Resource.class); + final Long fileSize = 12345L; + final String url = "test url"; + doReturn(fileSize).when(resource).getProvidedFileSize(); + doReturn(url).when(resource).getResourceUrl(); // Mime type for video final String detectedVideoMimeType = "video/detected mime type"; @@ -527,7 +543,7 @@ void testCopy() { // Verify assertNotNull(videoResult); assertNotNull(videoResult.getOriginalMetadata()); - assertTrue(videoResult.getOriginalMetadata() instanceof VideoResourceMetadata); + assertInstanceOf(VideoResourceMetadata.class, videoResult.getOriginalMetadata()); assertEquals(detectedVideoMimeType, videoResult.getOriginalMetadata().getMimeType()); assertEquals(fileSize, videoResult.getOriginalMetadata().getContentSize()); assertEquals(url, videoResult.getOriginalMetadata().getResourceUrl()); @@ -539,6 +555,12 @@ void testCopy() { assertNull(((VideoResourceMetadata) videoResult.getOriginalMetadata()).getCodecName()); assertNull(((VideoResourceMetadata) videoResult.getOriginalMetadata()).getBitRate()); assertNull(((VideoResourceMetadata) videoResult.getOriginalMetadata()).getDuration()); + } + + @Test + void testOtherCopy() { + // Create resource + final Resource resource = 
mock(Resource.class); // Other mime type final String detectedOtherMimeType = "detected other mime type"; diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/ImageProcessorTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/ImageProcessorTest.java index ca7d329e72..9a1a7b2dc8 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/ImageProcessorTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/ImageProcessorTest.java @@ -1,6 +1,7 @@ package eu.europeana.metis.mediaprocessing.extraction; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -26,7 +27,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.junit.jupiter.api.BeforeAll; @@ -90,7 +90,7 @@ void testExtract() throws MediaExtractionException, IOException { final String url = "testUrl"; final File content = new File("content file"); final RdfResourceEntry rdfResourceEntry = new RdfResourceEntry("testUrl", - Collections.singletonList(UrlType.IS_SHOWN_BY)); + Collections.singletonList(UrlType.IS_SHOWN_BY), false); final ResourceImpl resource = spy( new ResourceImpl(rdfResourceEntry, null, null, URI.create("http://www.test.com"))); final String detectedMimeType = "detected mime type"; @@ -116,7 +116,7 @@ void testExtract() throws MediaExtractionException, IOException { final ResourceExtractionResultImpl result = imageProcessor.extractMetadata(resource, detectedMimeType, true); // Verify result metadata general properties - 
assertTrue(result.getOriginalMetadata() instanceof ImageResourceMetadata); + assertInstanceOf(ImageResourceMetadata.class, result.getOriginalMetadata()); final ImageResourceMetadata metadata = (ImageResourceMetadata) result.getOriginalMetadata(); assertEquals(rdfResourceEntry.getResourceUrl(), metadata.getResourceUrl()); assertEquals(detectedMimeType, metadata.getMimeType()); @@ -129,8 +129,7 @@ void testExtract() throws MediaExtractionException, IOException { assertEquals(Integer.valueOf(imageMetadata.getWidth()), metadata.getWidth()); assertEquals(Integer.valueOf(imageMetadata.getHeight()), metadata.getHeight()); assertEquals(imageMetadata.getColorSpace(), metadata.getColorSpace().xmlValue()); - assertEquals(imageMetadata.getDominantColors().stream().map(color -> "#" + color) - .collect(Collectors.toList()), metadata.getDominantColors()); + assertEquals(imageMetadata.getDominantColors().stream().map(color -> "#" + color).toList(), metadata.getDominantColors()); // Verify result thumbnails assertEquals(thumbnailsAndMetadata.getRight(), result.getThumbnails()); diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImplTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImplTest.java index 81006478a7..e08bc93a4a 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImplTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/MediaExtractorImplTest.java @@ -7,6 +7,8 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static 
org.mockito.Mockito.doReturn; @@ -25,8 +27,11 @@ import eu.europeana.metis.mediaprocessing.http.ResourceDownloadClient; import eu.europeana.metis.mediaprocessing.model.RdfResourceEntry; import eu.europeana.metis.mediaprocessing.model.Resource; +import eu.europeana.metis.mediaprocessing.model.ResourceExtractionResult; import eu.europeana.metis.mediaprocessing.model.ResourceExtractionResultImpl; +import eu.europeana.metis.mediaprocessing.model.ResourceImpl; import eu.europeana.metis.mediaprocessing.model.UrlType; +import eu.europeana.metis.mediaprocessing.model.VideoResourceMetadata; import eu.europeana.metis.mediaprocessing.wrappers.TikaWrapper; import eu.europeana.metis.schema.model.MediaType; import java.io.IOException; @@ -38,8 +43,10 @@ import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; +import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.tika.metadata.Metadata; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -56,9 +63,14 @@ class MediaExtractorImplTest { private static AudioVideoProcessor audioVideoProcessor; private static TextProcessor textProcessor; private static Media3dProcessor media3dProcessor; - + private static OEmbedProcessor oEmbedProcessor; private static MediaExtractorImpl mediaExtractor; + private void testGetMode(ProcessingMode expected, Set<UrlType> urlTypes) { + final RdfResourceEntry entry = new RdfResourceEntry("url string", new ArrayList<>(urlTypes), false); + assertEquals(expected, mediaExtractor.getMode(entry)); + } + @BeforeAll static void prepare() { resourceDownloadClient = mock(ResourceDownloadClient.class); @@ -69,14 +81,15 @@ static void prepare() { audioVideoProcessor = mock(AudioVideoProcessor.class); textProcessor = mock(TextProcessor.class); media3dProcessor = mock(Media3dProcessor.class); + oEmbedProcessor = mock(OEmbedProcessor.class); mediaExtractor = spy(new 
MediaExtractorImpl(resourceDownloadClient, mimeTypeDetectHttpClient, - tika, imageProcessor, audioVideoProcessor, textProcessor, media3dProcessor)); + tika, List.of(imageProcessor, audioVideoProcessor, textProcessor, media3dProcessor, oEmbedProcessor))); } @BeforeEach void resetMocks() { reset(resourceDownloadClient, mimeTypeDetectHttpClient, commandExecutor, tika, imageProcessor, - audioVideoProcessor, textProcessor, mediaExtractor); + audioVideoProcessor, textProcessor, mediaExtractor, oEmbedProcessor); } @Test @@ -94,9 +107,9 @@ void testDetectAndVerifyMimeType() throws IOException, MediaExtractionException, // Register mime types final String detectedMimeTypeNoContent = "detected mime type no content"; - doReturn(false).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeNoContent); + doReturn(false).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeNoContent, true); final String detectedMimeTypeWithContent = "detected mime type with content"; - doReturn(true).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeWithContent); + doReturn(true).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeWithContent, true); // Test case where there is no content doReturn(false).when(resource).hasContent(); @@ -123,9 +136,9 @@ void testDetectAndVerifyMimeType() throws IOException, MediaExtractionException, // Check what happens if we are not supposed to process assertThrows(IllegalStateException.class, () -> mediaExtractor.detectAndVerifyMimeType(resource, ProcessingMode.NONE)); } - - @Test - void testVerifyAndCorrectContentAvailability () throws MediaExtractionException, IOException { + + @Test + void testVerifyAndCorrectContentAvailability() throws MediaExtractionException, IOException { // Set up the resource final String location = "resource url"; @@ -137,36 +150,36 @@ void testVerifyAndCorrectContentAvailability () throws MediaExtractionException, // Register mime types final String 
detectedMimeTypeNoContent = "detected mime type no content"; - doReturn(false).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeNoContent); + doReturn(false).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeNoContent, false); final String detectedMimeTypeWithContent = "detected mime type with content"; - doReturn(true).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeWithContent); + doReturn(true).when(mediaExtractor).shouldDownloadForFullProcessing(detectedMimeTypeWithContent, false); // Test case where there is content regardless of whether there should be, or the processing // mode doesn't require content or the detected mime type doesn't require content. doReturn(true).when(resource).hasContent(); doReturn(detectedMimeTypeWithContent).when(resource).getProvidedMimeType(); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeWithContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeNoContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeWithContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeNoContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent, false); doReturn(false).when(resource).hasContent(); doReturn(detectedMimeTypeNoContent).when(resource).getProvidedMimeType(); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, 
detectedMimeTypeNoContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeNoContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent, false); doReturn(false).when(resource).hasContent(); doReturn(detectedMimeTypeWithContent).when(resource).getProvidedMimeType(); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeNoContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeNoContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeWithContent, false); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.REDUCED, detectedMimeTypeNoContent, false); // Test case where there should be content but there isn't and it is flagged as an exception. doReturn(false).when(resource).hasContent(); doReturn(detectedMimeTypeWithContent).when(resource).getProvidedMimeType(); assertThrows(MediaExtractionException.class, () -> mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, - detectedMimeTypeWithContent)); - + detectedMimeTypeWithContent, false)); + // Test case where there is no content, but there should be and a correction is attempted. 
// Step 1: set the mocking to use a boolean that changes when content is set. final AtomicBoolean hasContent = new AtomicBoolean(false); @@ -178,10 +191,10 @@ void testVerifyAndCorrectContentAvailability () throws MediaExtractionException, doReturn(detectedMimeTypeNoContent).when(resource).getProvidedMimeType(); doReturn(resourceWithContent).when(resourceDownloadClient).downloadWithContent(any()); doReturn(true).when(resourceWithContent).hasContent(); - + // Step 2: make the call and check that the download has occurred. verify(resourceDownloadClient, never()).downloadWithContent(any()); - mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeWithContent); + mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, detectedMimeTypeWithContent, false); final ArgumentCaptor<RdfResourceEntry> entryCaptor = ArgumentCaptor.forClass(RdfResourceEntry.class); verify(resourceDownloadClient, times(1)).downloadWithContent(entryCaptor.capture()); @@ -189,23 +202,31 @@ void testVerifyAndCorrectContentAvailability () throws MediaExtractionException, final RdfResourceEntry entry = entryCaptor.getValue(); assertEquals(location, entry.getResourceUrl()); verify(resource, times(1)).markAsWithContent(content); - + // Step 3: check what happens when the download does not include content either. 
hasContent.set(false); doReturn(false).when(resourceWithContent).hasContent(); assertThrows(MediaExtractionException.class, () -> mediaExtractor.verifyAndCorrectContentAvailability(resource, ProcessingMode.FULL, - detectedMimeTypeWithContent)); + detectedMimeTypeWithContent, false)); } @Test void testChooseMediaProcessor() { - assertSame(imageProcessor, mediaExtractor.chooseMediaProcessor(MediaType.IMAGE)); - assertSame(audioVideoProcessor, mediaExtractor.chooseMediaProcessor(MediaType.AUDIO)); - assertSame(audioVideoProcessor, mediaExtractor.chooseMediaProcessor(MediaType.VIDEO)); - assertSame(textProcessor, mediaExtractor.chooseMediaProcessor(MediaType.TEXT)); - assertSame(media3dProcessor, mediaExtractor.chooseMediaProcessor(MediaType.THREE_D)); - assertNull(mediaExtractor.chooseMediaProcessor(MediaType.OTHER)); + assertSame(imageProcessor, mediaExtractor.chooseMediaProcessor(MediaType.IMAGE,"image/subtype", false).get(0)); + assertSame(audioVideoProcessor, mediaExtractor.chooseMediaProcessor(MediaType.AUDIO,"audio/subtype", false).get(0)); + assertSame(audioVideoProcessor, mediaExtractor.chooseMediaProcessor(MediaType.VIDEO,"video/subtype", false).get(0)); + assertSame(textProcessor, mediaExtractor.chooseMediaProcessor(MediaType.TEXT, "text/subtype", false).get(0)); + assertSame(media3dProcessor, mediaExtractor.chooseMediaProcessor(MediaType.THREE_D,"model/subtype", false).get(0)); + assertSame(oEmbedProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/json", true).get(0)); + assertSame(textProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/json", true).get(1)); + assertSame(oEmbedProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/xml", true).get(0)); + assertSame(textProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/xml", true).get(1)); + assertSame(oEmbedProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"text/xml", true).get(0)); + 
assertSame(textProcessor, mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"text/xml", true).get(1)); + assertTrue(mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/json", false).isEmpty()); + assertTrue(mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"application/xml", false).isEmpty()); + assertTrue(mediaExtractor.chooseMediaProcessor(MediaType.OTHER,"text/xml", false).isEmpty()); } @Test @@ -220,94 +241,99 @@ void testProcessResource() throws MediaExtractionException, IOException { doReturn(detectedMimeType).when(mediaExtractor).detectAndVerifyMimeType(eq(resource), any()); // Set processor. - doReturn(audioVideoProcessor) - .when(mediaExtractor).chooseMediaProcessor(MediaType.getMediaType(detectedMimeType)); + doReturn(List.of(audioVideoProcessor)) + .when(mediaExtractor).chooseMediaProcessor(MediaType.getMediaType(detectedMimeType), detectedMimeType, false); final ResourceExtractionResultImpl result1 = new ResourceExtractionResultImpl(null, null); doReturn(result1).when(audioVideoProcessor).extractMetadata(resource, detectedMimeType, hasMainThumbnail); final ResourceExtractionResultImpl result2 = new ResourceExtractionResultImpl(null, null); doReturn(result2).when(audioVideoProcessor).copyMetadata(resource, detectedMimeType); // Make the call. 
- assertSame(result1, mediaExtractor.performProcessing(resource, ProcessingMode.FULL, hasMainThumbnail)); + assertSame(result1, mediaExtractor.performProcessing(resource, ProcessingMode.FULL, hasMainThumbnail, false)); verify(mediaExtractor, times(1)).detectAndVerifyMimeType(resource, ProcessingMode.FULL); verify(mediaExtractor, times(1)).verifyAndCorrectContentAvailability(resource, - ProcessingMode.FULL, detectedMimeType); - assertSame(result2, mediaExtractor.performProcessing(resource, ProcessingMode.REDUCED, hasMainThumbnail)); + ProcessingMode.FULL, detectedMimeType, false); + assertSame(result2, mediaExtractor.performProcessing(resource, ProcessingMode.REDUCED, hasMainThumbnail, false)); verify(mediaExtractor, times(1)).detectAndVerifyMimeType(resource, ProcessingMode.REDUCED); verify(mediaExtractor, times(1)).verifyAndCorrectContentAvailability(resource, - ProcessingMode.REDUCED, detectedMimeType); - + ProcessingMode.REDUCED, detectedMimeType, false); + // Check what happens if we are not supposed to process assertThrows(IllegalStateException.class, - () -> mediaExtractor.performProcessing(resource, ProcessingMode.NONE, hasMainThumbnail)); + () -> mediaExtractor.performProcessing(resource, ProcessingMode.NONE, hasMainThumbnail, false)); // Check what happens if there is no processor - doReturn(null).when(mediaExtractor).chooseMediaProcessor(MediaType.getMediaType(detectedMimeType)); - assertNull(mediaExtractor.performProcessing(resource, ProcessingMode.FULL, hasMainThumbnail)); - assertNull(mediaExtractor.performProcessing(resource, ProcessingMode.REDUCED, hasMainThumbnail)); + doReturn(Collections.emptyList()).when(mediaExtractor).chooseMediaProcessor(MediaType.getMediaType(detectedMimeType), detectedMimeType, false); + assertNull(mediaExtractor.performProcessing(resource, ProcessingMode.FULL, hasMainThumbnail, false)); + assertNull(mediaExtractor.performProcessing(resource, ProcessingMode.REDUCED, hasMainThumbnail, false)); } @Test void 
testPerformMediaExtraction() throws IOException, MediaExtractionException { // Create objects and mock for full processing. - final RdfResourceEntry entry1 = new RdfResourceEntry("resource url 1", Collections.emptyList()); + final RdfResourceEntry entry1 = new RdfResourceEntry("resource url 1", Collections.emptyList(), true); final Resource resource1 = mock(Resource.class); final boolean hasMainThumbnail = false; doReturn(ProcessingMode.FULL).when(mediaExtractor).getMode(entry1); doReturn(resource1).when(resourceDownloadClient).downloadBasedOnMimeType(entry1); final ResourceExtractionResultImpl result1 = new ResourceExtractionResultImpl(null, null); - doReturn(result1).when(mediaExtractor).performProcessing(resource1, ProcessingMode.FULL, hasMainThumbnail ); + doReturn(result1).when(mediaExtractor).performProcessing(resource1, ProcessingMode.FULL, hasMainThumbnail, true); // Make the call and verify that the resource is closed. - assertSame(result1, mediaExtractor.performMediaExtraction(entry1, hasMainThumbnail )); + assertSame(result1, mediaExtractor.performMediaExtraction(entry1, hasMainThumbnail)); verify(resource1).close(); // mock for reduced processing - final RdfResourceEntry entry2 = new RdfResourceEntry("resource url 2", Collections.emptyList()); + final RdfResourceEntry entry2 = new RdfResourceEntry("resource url 2", Collections.emptyList(), true); final Resource resource2 = mock(Resource.class); doReturn(ProcessingMode.REDUCED).when(mediaExtractor).getMode(entry2); doReturn(resource2).when(resourceDownloadClient).downloadWithoutContent(entry2); final ResourceExtractionResultImpl result2 = new ResourceExtractionResultImpl(null, null); - doReturn(result2).when(mediaExtractor).performProcessing(resource2, ProcessingMode.REDUCED, hasMainThumbnail ); + doReturn(result2).when(mediaExtractor).performProcessing(resource2, ProcessingMode.REDUCED, hasMainThumbnail, true); // Make the call and verify that the resource is closed. 
- assertSame(result2, mediaExtractor.performMediaExtraction(entry2, hasMainThumbnail )); + assertSame(result2, mediaExtractor.performMediaExtraction(entry2, hasMainThumbnail)); verify(resource2).close(); // Check exception from downloading. - final RdfResourceEntry entry3 = new RdfResourceEntry("resource url 3", Collections.emptyList()); + final RdfResourceEntry entry3 = new RdfResourceEntry("resource url 3", Collections.emptyList(), true); doReturn(ProcessingMode.FULL).when(mediaExtractor).getMode(entry3); doThrow(IOException.class).when(resourceDownloadClient).downloadBasedOnMimeType(entry3); assertThrows(MediaExtractionException.class, - () -> mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail )); + () -> mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail)); doThrow(RuntimeException.class).when(resourceDownloadClient).downloadBasedOnMimeType(entry3); assertThrows(MediaExtractionException.class, - () -> mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail )); + () -> mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail)); // Verify sanity check doReturn(ProcessingMode.NONE).when(mediaExtractor).getMode(entry3); - assertNull(mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail )); + assertNull(mediaExtractor.performMediaExtraction(entry3, hasMainThumbnail)); } @Test void testClose() throws IOException { mediaExtractor.close(); - verify(resourceDownloadClient).close(); + verify(resourceDownloadClient, times(2)).close(); } @Test - void testShouldDownloadForFullProcessing() { + void testshouldDownloadForFullProcessing() { doReturn(true).when(imageProcessor).downloadResourceForFullProcessing(); doReturn(true).when(textProcessor).downloadResourceForFullProcessing(); doReturn(false).when(audioVideoProcessor).downloadResourceForFullProcessing(); doReturn(false).when(media3dProcessor).downloadResourceForFullProcessing(); - assertTrue(mediaExtractor.shouldDownloadForFullProcessing("image/unknown_type")); - 
assertTrue(mediaExtractor.shouldDownloadForFullProcessing("text/unknown_type")); - assertFalse(mediaExtractor.shouldDownloadForFullProcessing("audio/unknown_type")); - assertFalse(mediaExtractor.shouldDownloadForFullProcessing("video/unknown_type")); - assertFalse(mediaExtractor.shouldDownloadForFullProcessing("model/unknown_type")); - assertFalse(mediaExtractor.shouldDownloadForFullProcessing("unknown_type")); + doReturn(true).when(oEmbedProcessor).downloadResourceForFullProcessing(); + assertTrue(mediaExtractor.shouldDownloadForFullProcessing("image/unknown_type", false)); + assertTrue(mediaExtractor.shouldDownloadForFullProcessing("text/unknown_type", false)); + assertFalse(mediaExtractor.shouldDownloadForFullProcessing("audio/unknown_type", false)); + assertFalse(mediaExtractor.shouldDownloadForFullProcessing("video/unknown_type", false)); + assertFalse(mediaExtractor.shouldDownloadForFullProcessing("model/unknown_type", false)); + assertFalse(mediaExtractor.shouldDownloadForFullProcessing("unknown_type", false)); + assertTrue(mediaExtractor.shouldDownloadForFullProcessing("application/xml", true)); + assertTrue(mediaExtractor.shouldDownloadForFullProcessing("application/json", true)); + assertTrue(mediaExtractor.shouldDownloadForFullProcessing("application/xml", false)); + assertFalse(mediaExtractor.shouldDownloadForFullProcessing("application/json", false)); } @Test @@ -330,8 +356,49 @@ void testGetMode() { testGetMode(ProcessingMode.FULL, EnumSet.allOf(UrlType.class)); } - private void testGetMode(ProcessingMode expected, Set<UrlType> urlTypes) { - final RdfResourceEntry entry = new RdfResourceEntry("url string", new ArrayList<>(urlTypes)); - assertEquals(expected, mediaExtractor.getMode(entry)); + @Test + void getOEmbedJson() throws MediaExtractionException, IOException { + final String resourceUrl = "https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F24416915"; + + final String detectedMimeType = "application/json+oembed"; + final 
RdfResourceEntry rdfResourceEntry = new RdfResourceEntry(resourceUrl, Collections.singletonList(UrlType.IS_SHOWN_BY), true); + final Resource resource = spy( + new ResourceImpl(rdfResourceEntry, null, null, URI.create(resourceUrl))); + doReturn(true) + .when(resource).hasContent(); + doReturn(detectedMimeType) + .when(tika).detect(any(InputStream.class), any(Metadata.class)); + doReturn(Paths.get(getClass().getClassLoader().getResource("__files/oembed.json").getPath())) + .when(resource).getContentPath(); + doReturn(resource).when(resourceDownloadClient).downloadBasedOnMimeType(rdfResourceEntry); + ResourceExtractionResult extractionResult = new ResourceExtractionResultImpl( + new VideoResourceMetadata(detectedMimeType, resourceUrl, 0L)); + doReturn(extractionResult).when(oEmbedProcessor).extractMetadata(any(Resource.class), anyString(), anyBoolean()); + + ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, false); + assertEquals(resourceUrl, resourceExtractionResult.getMetadata().getResourceUrl()); + } + + @Test + void getOEmbedXml() throws MediaExtractionException, IOException { + final String resourceUrl = "https://vimeo.com/api/oembed.xml?url=https%3A%2F%2Fvimeo.com%2F24416915"; + + final String detectedMimeType = "application/xml+oembed"; + final RdfResourceEntry rdfResourceEntry = new RdfResourceEntry(resourceUrl, Collections.singletonList(UrlType.IS_SHOWN_BY), true); + final ResourceImpl resource = spy( + new ResourceImpl(rdfResourceEntry, detectedMimeType, null, URI.create(resourceUrl))); + doReturn(true) + .when(resource).hasContent(); + doReturn(detectedMimeType) + .when(tika).detect(any(InputStream.class), any(Metadata.class)); + doReturn(Paths.get(getClass().getClassLoader().getResource("__files/oembed.xml").getPath())) + .when(resource).getContentPath(); + doReturn(resource).when(resourceDownloadClient).downloadBasedOnMimeType(rdfResourceEntry); + ResourceExtractionResult extractionResult = new 
ResourceExtractionResultImpl( + new VideoResourceMetadata(detectedMimeType, resourceUrl, 0L)); + doReturn(extractionResult).when(oEmbedProcessor).extractMetadata(any(Resource.class), anyString(), anyBoolean()); + + ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, false); + assertEquals(resourceUrl, resourceExtractionResult.getMetadata().getResourceUrl()); } } diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessorTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessorTest.java new file mode 100644 index 0000000000..b0f251a9db --- /dev/null +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/OEmbedProcessorTest.java @@ -0,0 +1,106 @@ +package eu.europeana.metis.mediaprocessing.extraction; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.github.tomakehurst.wiremock.junit5.WireMockExtension; +import eu.europeana.metis.mediaprocessing.MediaProcessorFactory; +import eu.europeana.metis.mediaprocessing.exception.MediaExtractionException; +import eu.europeana.metis.mediaprocessing.http.ResourceDownloadClient; +import eu.europeana.metis.mediaprocessing.model.RdfResourceEntry; +import eu.europeana.metis.mediaprocessing.model.Resource; +import eu.europeana.metis.mediaprocessing.model.ResourceExtractionResult; +import eu.europeana.metis.mediaprocessing.model.UrlType; +import java.io.IOException; +import java.io.InputStream; +import 
java.util.Collections; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +class OEmbedProcessorTest { + + @RegisterExtension + static WireMockExtension wireMockExtension = WireMockExtension.newInstance() + .options(wireMockConfig() + .dynamicPort() + .dynamicHttpsPort()) + .build(); + private OEmbedProcessor processor; + private ResourceDownloadClient resourceDownloadClient; + + private OEmbedResourceTest getOEmbedResourceTest(String filename, String detectedMimeType) throws IOException { + try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(filename)) { + byte[] audioBytes = inputStream.readAllBytes(); + wireMockExtension.stubFor(get("/api/resource?url=https://vimeo.com/24416915").willReturn(aResponse() + .withStatus(200) + .withBody(audioBytes) + .withHeader("Content-Disposition", "inline; filename=\"oembed.xml\""))); + } + final String resourceUrl = String.format("http://localhost:%d/api/resource?url=https://vimeo.com/24416915", + wireMockExtension.getPort()); + + final RdfResourceEntry rdfResourceEntry = new RdfResourceEntry(resourceUrl, Collections.singletonList(UrlType.IS_SHOWN_BY), false); + final Resource resource = resourceDownloadClient.downloadBasedOnMimeType(rdfResourceEntry); + return new OEmbedResourceTest(resourceUrl, detectedMimeType, resource); + } + + private record OEmbedResourceTest(String resourceUrl, String detectedMimeType, Resource resource) { + + } + + @BeforeEach + void setUp() { + processor = new OEmbedProcessor(); + resourceDownloadClient = new ResourceDownloadClient(MediaProcessorFactory.DEFAULT_MAX_REDIRECT_COUNT, download -> true, + MediaProcessorFactory.DEFAULT_RESOURCE_CONNECT_TIMEOUT, + MediaProcessorFactory.DEFAULT_RESOURCE_RESPONSE_TIMEOUT, + MediaProcessorFactory.DEFAULT_RESOURCE_DOWNLOAD_TIMEOUT); + } + + @Test + void extractMetadata() throws MediaExtractionException, IOException { + // given + OEmbedResourceTest 
oembedResource = getOEmbedResourceTest("__files/oembed.xml", "application/xml+oembed"); + // when + ResourceExtractionResult resourceExtractionResult = processor.extractMetadata(oembedResource.resource(), + oembedResource.detectedMimeType(), true); + + // then + assertNotNull(resourceExtractionResult); + assertEquals(oembedResource.resourceUrl(), resourceExtractionResult.getMetadata().getResourceUrl()); + assertEquals(oembedResource.detectedMimeType(), resourceExtractionResult.getMetadata().getMimeType()); + } + + @Test + void copyMetadataWithOEmbed_expectNull() throws MediaExtractionException, IOException { + // given + OEmbedResourceTest oembedResource = getOEmbedResourceTest("__files/oembed.xml", "application/xml+oembed"); + // when + ResourceExtractionResult resourceExtractionResult = processor.copyMetadata(oembedResource.resource, + oembedResource.detectedMimeType); + // then + assertNull(resourceExtractionResult); + } + + @Test + void copyMetadataNotOEmbed_expectObject() throws MediaExtractionException, IOException { + // given + OEmbedResourceTest oembedResource = getOEmbedResourceTest("__files/not_oembed.xml", "application/xml"); + // when + ResourceExtractionResult resourceExtractionResult = processor.copyMetadata(oembedResource.resource, + oembedResource.detectedMimeType); + // then + assertNull(resourceExtractionResult); + } + + @Test + void downloadResourceForFullProcessing() { + assertTrue(processor.downloadResourceForFullProcessing()); + } +} diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverterTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverterTest.java index a8d98ae92f..dbcb81a46c 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverterTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/PdfToImageConverterTest.java @@ -49,6 +49,12 @@ 
void testDiscoverGhostScriptCommand() throws MediaProcessorException { final String ghostScriptCommand = "gs"; final List<String> ghostScriptVersionCommand = Arrays.asList(ghostScriptCommand, "--version"); + // Test right version + doReturn("10.02").when(commandExecutor) + .execute(eq(ghostScriptVersionCommand), anyMap(), eq(true), any()); + assertEquals(ghostScriptCommand, + PdfToImageConverter.discoverGhostScriptCommand(commandExecutor)); + // Test right version doReturn("9.26").when(commandExecutor) .execute(eq(ghostScriptVersionCommand), anyMap(), eq(true), any()); diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/TextProcessorTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/TextProcessorTest.java index be3881c28c..93a3ea3a5b 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/TextProcessorTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/TextProcessorTest.java @@ -106,7 +106,7 @@ void testExtractForRegularText() throws IOException, MediaExtractionException { // Define input final RdfResourceEntry rdfResourceEntry = new RdfResourceEntry("testUrl", - Collections.singletonList(UrlType.IS_SHOWN_BY)); + Collections.singletonList(UrlType.IS_SHOWN_BY), false); final ResourceImpl resource = spy( new ResourceImpl(rdfResourceEntry, null, null, URI.create("http://www.test.com"))); final String detectedMimeType = "detected mime type"; @@ -160,7 +160,7 @@ void testExtractForPdf() throws IOException, MediaExtractionException { final File contentFile = new File("content"); doReturn(contentFile).when(contentPath).toFile(); final RdfResourceEntry rdfResourceEntry = new RdfResourceEntry("testUrl", - Collections.singletonList(UrlType.IS_SHOWN_BY)); + Collections.singletonList(UrlType.IS_SHOWN_BY), false); final ResourceImpl resource = spy( new ResourceImpl(rdfResourceEntry, null, null, 
URI.create("http://www.test.com"))); final String detectedMimeType = "application/pdf"; diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModelTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModelTest.java new file mode 100644 index 0000000000..670912a280 --- /dev/null +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/extraction/oembed/OEmbedModelTest.java @@ -0,0 +1,77 @@ +package eu.europeana.metis.mediaprocessing.extraction.oembed; + +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.getOEmbedModelFromJson; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.getOEmbedModelFromXml; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.hasValidHeightSizeThumbnail; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.hasValidHeightSizeUrl; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.hasValidWidthSizeThumbnail; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.hasValidWidthSizeUrl; +import static eu.europeana.metis.mediaprocessing.extraction.oembed.OEmbedValidation.isValidTypeVideo; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import org.junit.jupiter.api.Test; + +class OEmbedModelTest { + + @Test + void getOEmbedModelFromJsonTest() throws IOException { + InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.json"); + + OEmbedModel oEmbedModel = getOEmbedModelFromJson(inputStream.readAllBytes()); + + assertNotNull(oEmbedModel); + assertTrue(isValidTypeVideo(oEmbedModel)); + } + + @Test + void 
getOEmbedModelFromJsonTestNoMaxDimensions() throws IOException { + InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/test_oembed.json"); + + OEmbedModel oEmbedModel = getOEmbedModelFromJson(inputStream.readAllBytes()); + final String url = "https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F42947250"; + OEmbedValidation.checkValidWidthAndHeightDimensions(oEmbedModel, url); + + assertNotNull(oEmbedModel); + assertTrue(isValidTypeVideo(oEmbedModel)); + } + + @Test + void getOEmbedModelFromXmlTest() throws IOException { + InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.xml"); + + OEmbedModel oEmbedModel = getOEmbedModelFromXml(inputStream.readAllBytes()); + + assertNotNull(oEmbedModel); + assertTrue(isValidTypeVideo(oEmbedModel)); + } + + @Test + void checkValidWidthAndHeightDimensions() throws IOException { + String url = "https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fcdn.pixabay.com%2Fvideo%2F2023%2F10%2F22%2F186070-876973719_small.mp4&maxheight=300&maxwidth=500"; + InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.json"); + + OEmbedModel oEmbedModel = getOEmbedModelFromJson(inputStream.readAllBytes()); + + assertTrue(hasValidHeightSizeUrl(oEmbedModel, url)); + assertTrue(hasValidWidthSizeUrl(oEmbedModel, url)); + assertTrue(hasValidHeightSizeThumbnail(oEmbedModel, url)); + assertTrue(hasValidWidthSizeThumbnail(oEmbedModel, url)); + } + + @Test + void checkValidWidthAndHeightDimensions_InvalidUrl() throws IOException { + String url = "my url test"; + InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.json"); + + OEmbedModel oEmbedModel = getOEmbedModelFromJson(inputStream.readAllBytes()); + + assertFalse(hasValidHeightSizeUrl(oEmbedModel, url)); + assertFalse(hasValidWidthSizeUrl(oEmbedModel, url)); + assertFalse(hasValidHeightSizeThumbnail(oEmbedModel, url)); + 
assertFalse(hasValidWidthSizeThumbnail(oEmbedModel, url)); + } +} diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClientTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClientTest.java index bc76d4c7cc..981e8cd8ec 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClientTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/http/MimeTypeDetectHttpClientTest.java @@ -6,15 +6,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import com.github.tomakehurst.wiremock.junit5.WireMockExtension; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; - +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; class MimeTypeDetectHttpClientTest { @@ -107,7 +105,7 @@ void download_returnProvidedStlMimeType_expectSuccess() throws IOException, URIS String detectedMimeType = mimeTypeDetectHttpClient.download(new URI(url).toURL()); // then - assertEquals("model/stl", detectedMimeType); + assertEquals("model/x.stl-binary", detectedMimeType); } @Test @@ -146,4 +144,41 @@ void download_detectMimeTypeGlb_expectSuccess() throws IOException, URISyntaxExc assertEquals("model/gltf-binary", detectedMimeType); } + @Test + void download_detectMimeTypeOembedJson_expectSuccess() throws IOException, URISyntaxException { + // given + try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.json")) { + byte[] jsonBytes = inputStream.readAllBytes(); + wireMockExtension.stubFor(get("/api/oembed.json?url=https://vimeo.com/24416915") + .willReturn(aResponse() + .withStatus(200) + .withBody(jsonBytes) + 
.withHeader("Content-Disposition", "inline; filename=\"oembed.json\""))); + } + final String url = String.format("http://localhost:%d/api/oembed.json?url=https://vimeo.com/24416915", wireMockExtension.getPort()); + // when + String detectedMimeType = mimeTypeDetectHttpClient.download(new URI(url).toURL()); + + // then + assertEquals("application/json", detectedMimeType); + } + + @Test + void download_detectMimeTypeOembedXml_expectSuccess() throws IOException, URISyntaxException { + // given + try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream("__files/oembed.xml")) { + byte[] xmlBytes = inputStream.readAllBytes(); + wireMockExtension.stubFor(get("/api/oembed.xml?url=https://vimeo.com/24416915") + .willReturn(aResponse() + .withStatus(200) + .withBody(xmlBytes) + .withHeader("Content-Disposition", "inline; filename=\"oembed.xml\""))); + } + final String url = String.format("http://localhost:%d/api/oembed.xml?url=https://vimeo.com/24416915", wireMockExtension.getPort()); + // when + String detectedMimeType = mimeTypeDetectHttpClient.download(new URI(url).toURL()); + + // then + assertEquals("application/xml", detectedMimeType); + } } diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImplTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImplTest.java index 09ae1c34ac..f32284b690 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImplTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/EnrichedRdfImplTest.java @@ -5,7 +5,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.Mockito.doReturn; import static 
org.mockito.Mockito.mock; @@ -69,7 +68,7 @@ void testEnrichResource() { // Verify adding the first resource. verify(rdf, times(1)).setWebResourceList(notNull()); assertEquals(1, rdf.getWebResourceList().size()); - assertEquals(url1, rdf.getWebResourceList().get(0).getAbout()); + assertEquals(url1, rdf.getWebResourceList().getFirst().getAbout()); assertEquals(1, enrichedRdf.getResourceUrls().size()); assertEquals(names1, enrichedRdf.getThumbnailTargetNames(url1)); verify(resource1.getMetaData(), times(1)).updateResource(any()); @@ -137,7 +136,7 @@ void testFinalizeRdf() { final String url = "url value"; doReturn(url).when(enrichedRdf).getEdmPreviewThumbnailUrl(); assertEquals(rdf, enrichedRdf.finalizeRdf()); - verify(enrichedRdf, times(1)).updateEdmPreview(eq(url)); + verify(enrichedRdf, times(1)).updateEdmPreview(url); verify(enrichedRdf, times(1)).updateEdmPreview(anyString()); } diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/RdfWrapperTest.java b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/RdfWrapperTest.java index ad984e6a11..046bb304b0 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/RdfWrapperTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/RdfWrapperTest.java @@ -60,7 +60,7 @@ void testGetFirstResourceOfType (){ middleAggregation.getHasViewList().get(2).setResource(firstHasView); middleAggregation.getHasViewList().get(3).setResource(middleHasView); lastAggregation.setHasViewList(Collections.singletonList(new HasView())); - lastAggregation.getHasViewList().get(0).setResource(lastHasView); + lastAggregation.getHasViewList().getFirst().setResource(lastHasView); // Setup tests - isShownAt links final String firstIsShownAt = "firstIsShownAt"; diff --git a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/WebResourceTest.java 
b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/WebResourceTest.java index ee40bb32ac..c9e1f181da 100644 --- a/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/WebResourceTest.java +++ b/metis-media-service/src/test/java/eu/europeana/metis/mediaprocessing/model/WebResourceTest.java @@ -6,6 +6,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import eu.europeana.metis.schema.jibx.ColorSpaceType; +import eu.europeana.metis.schema.jibx.EdmType; import eu.europeana.metis.schema.jibx.WebResourceType; import eu.europeana.metis.schema.model.Orientation; import java.util.Arrays; @@ -212,4 +213,21 @@ void testSetResolution() { webResource.setResolution(null); assertNull(resourceType.getSpatialResolution()); } + + @Test + void testSetEdmType() { + final WebResourceType resourceType = new WebResourceType(); + final WebResource webResource = new WebResource(resourceType); + webResource.setEdmType(EdmType.VIDEO); + assertNotNull(resourceType.getType1()); + assertEquals(EdmType.VIDEO, resourceType.getType1().getType()); + + webResource.setEdmType(EdmType.IMAGE); + assertNotNull(resourceType.getType1()); + assertEquals(EdmType.IMAGE, resourceType.getType1().getType()); + + webResource.setEdmType(EdmType._3_D); + assertNotNull(resourceType.getType1()); + assertEquals(EdmType._3_D, resourceType.getType1().getType()); + } } diff --git a/metis-media-service/src/test/resources/__files/not_oembed.json b/metis-media-service/src/test/resources/__files/not_oembed.json new file mode 100644 index 0000000000..080288e0a6 --- /dev/null +++ b/metis-media-service/src/test/resources/__files/not_oembed.json @@ -0,0 +1,25 @@ +{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, 
used to create markup languages such as DocBook.", + "GlossSeeAlso": [ + "GML", + "XML" + ] + }, + "GlossSee": "markup" + } + } + } + } +} diff --git a/metis-media-service/src/test/resources/__files/not_oembed.xml b/metis-media-service/src/test/resources/__files/not_oembed.xml new file mode 100644 index 0000000000..afeaf7be65 --- /dev/null +++ b/metis-media-service/src/test/resources/__files/not_oembed.xml @@ -0,0 +1,13 @@ +<?xml version="1.0" encoding="UTF-8"?> +<rdf:RDF + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:region="http://www.country-regions.fake/"> + <rdf:Description rdf:about="http://en.wikipedia.org/wiki/Oxford"> + <dc:title>Oxford</dc:title> + <dc:coverage>Oxfordshire</dc:coverage> + <dc:publisher>Wikipedia</dc:publisher> + <region:population>10000</region:population> + <region:principaltown rdf:resource="http://www.country-regions.fake/oxford"/> + </rdf:Description> +</rdf:RDF> diff --git a/metis-media-service/src/test/resources/__files/oembed.json b/metis-media-service/src/test/resources/__files/oembed.json new file mode 100644 index 0000000000..9e8e5fad7c --- /dev/null +++ b/metis-media-service/src/test/resources/__files/oembed.json @@ -0,0 +1,23 @@ +{ + "type": "video", + "version": "1.0", + "provider_name": "Vimeo", + "provider_url": "https://vimeo.com/", + "title": "Europeana promo", + "author_name": "Europeana", + "author_url": "https://vimeo.com/europeana", + "is_plus": "1", + "account_type": "plus", + "html": "<iframe src=\"https://player.vimeo.com/video/24416915?app_id=122963\" width=\"480\" height=\"270\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" title=\"Europeana promo\"></iframe>", + "width": 480, + "height": 270, + "duration": 31, + "description": "", + "thumbnail_url": "https://i.vimeocdn.com/video/223856359-d86332b534f4edd01355ee14c50b32473f746e4f56454128df8a8ca8228fffb8-d_295x166", + "thumbnail_width": 295, + 
"thumbnail_height": 166, + "thumbnail_url_with_play_button": "https://i.vimeocdn.com/filter/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F223856359-d86332b534f4edd01355ee14c50b32473f746e4f56454128df8a8ca8228fffb8-d_295x166&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png", + "upload_date": "2011-05-30 09:03:39", + "video_id": 24416915, + "uri": "/videos/24416915" +} diff --git a/metis-media-service/src/test/resources/__files/oembed.xml b/metis-media-service/src/test/resources/__files/oembed.xml new file mode 100644 index 0000000000..a24a6ab7f3 --- /dev/null +++ b/metis-media-service/src/test/resources/__files/oembed.xml @@ -0,0 +1,31 @@ +<?xml version="1.0" encoding="UTF-8"?> +<oembed> + <type>video</type> + <version>1.0</version> + <provider_name>Vimeo</provider_name> + <provider_url>https://vimeo.com/</provider_url> + <title>Europeana promo + Europeana + https://vimeo.com/europeana + 1 + plus + <iframe src="https://player.vimeo.com/video/24416915?app_id=122963" width="480" height="270" + frameborder="0" allow="autoplay; fullscreen; picture-in-picture; clipboard-write" title="Europeana + promo"></iframe> + + 480 + 270 + 31 + + + https://i.vimeocdn.com/video/223856359-d86332b534f4edd01355ee14c50b32473f746e4f56454128df8a8ca8228fffb8-d_295x166 + + 295 + 166 + + https://i.vimeocdn.com/filter/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F223856359-d86332b534f4edd01355ee14c50b32473f746e4f56454128df8a8ca8228fffb8-d_295x166&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png + + 2011-05-30 09:03:39 + 24416915 + /videos/24416915 + diff --git a/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample.xml b/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample.xml new file mode 100644 index 0000000000..7a40e8fc9c --- /dev/null +++ b/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample.xml @@ -0,0 +1,164 @@ + + + + IMAGE + + + + + Vimeo video + Europeana + Europeana + + + + + 58.04861 + 
-2.343056 + 8.0 + Skara Brae + + + Neolithic + -3180 + -2500 + + + + + + + + + + + + + + + + + + Ort, an dem Überreste der Vergangenheit erhalten geblieben sind + Tietyllä paikalla kiinteästi sijaitsevien jäännösten muodostama kokonaisuus + Plats där fornlämning påträffats + Объект материальной культуры, несущий в себе определённый объём информации о прошлом + Local onde se concentram vestígios arquelógicos + Τοποθεσία στην οποία βρίσκονται ενδείξεις ανθρώπινης δραστηριότητας του παρελθόντος + Place (or group of physical sites) in which evidence of past activity is preserved + Mjesto na kojemu postoji veća količina sačuvanih izrađevina i tvorevina iz prošlosti + Luogo in cui si conservano tracce dell'attività umana del passato + Lieu ou groupe de sites physiques où sont préservées des preuves de l'activité préhistorique, + historique ou contemporaine + + Lugar donde se concentran vestigios arqueológicos + Místo nebo skupina míst, kde jsou zachovány důkazy a pozůstatky historické aktivity + Historiaurreko, historiako edo gaur egungo jardueraren froga materialak gordetzen dituen leku edo + gune fisikoen multzoa + + Lloc on es conserven vestigis arqueològics + Plaats waar men archeologische vondsten heeft gedaan + Archäologische Stätte + Археологическое место + Arkeologinen kohde + Sítio arqueológico + Археологически обект + Archeologinė vieta + Arheoloģiskais piemineklis + Arheološko nalazište + Site archéologique + Régészeti lelőhely + Archeologická lokalita + Arheološko najdišče + Suíomh seandálaíochta + Jaciment arqueològic + Arkeologisk lokal + Αρχαιολογική θέση + Archaeological site + Sito archeologico + Yacimiento arqueológico + Muistis + Arkeologia-aztarnategi + Archeologická lokalita + Stanowisko archeologiczne + Sit arheologic + Arkæologisk område + Archeologische vindplaats + + + + + + + + + + + + + + CMC_HA/2255 + eng + + true + + + + + + Dive into the heart of Spain's vibrant cultural landscape with this mesmerizing video. 
Explore + the electrifying atmosphere of La Tomatina in Buñol, the haunting beauty of Semana Santa processions in Seville, and the + thrilling bull runs of San Fermín in Pamplona. Revel in the passionate Flamenco performances, savor the diverse flavors of + Spanish cuisine, and discover the deep-rooted traditions that weave through Spain's history. This video showcases the + colorful, spirited, and deeply traditional festivals that define Spanish culture + + mov + http://3dicons.dcu.gr/object/HA/1255 + English + + CMC Associates + CMC + + + Settlement + Panorama Movie of link path 1-5, Skara Brae + Movie + 3D ICONS + + Neolithic + + false + + + VIDEO + + + + Europeana Foundation + Europeana Foundation + 307_local_31072024_1640 + Netherlands + nl + 10 + + + Europeana + + + + + + + + diff --git a/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample_II.xml b/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample_II.xml new file mode 100644 index 0000000000..3c636cc532 --- /dev/null +++ b/metis-media-service/src/test/resources/__files/rdf_with_oembed_sample_II.xml @@ -0,0 +1,196 @@ + + + + + Quicktime interactive panorama of the link path between houses 1 and 8 at Skara Brae + mov + © CMC Associates + CMC + + + + + Quicktime interactive panorama of the link path between houses 1 and 5 at Skara Brae + mov + © CMC Associates + CMC + + + + + 59.04861 + -3.343056 + 7.0 + Skara Brae + + + Neolithic + -3180 + -2500 + + + + + village + dorp + Dörfer + vila + aldea + + Distinctions among villages, towns, and cities are relative and vary according to their individual regional contexts. Villages generally designate units of compact settlement, varying in size but usually larger than hamlets and smaller than towns and distinguished from the surrounding rural territory. + Diferencias entre aldeas, pueblos y ciudades son relativas y varían según su contexo regional individual. 
Por lo general, las aldeas son unidades de asentamiento compacto, variables en tamaño, siendo comúnmente más grandes que los caseríos y más pequeñas que los pueblos, y que aparecen diferenciadas en el territorio rural circundante. + Het onderscheid tussen dorp, kleine stad en grote stad is betrekkelijk en varieert naar gelang de individuele, regionale context. Dorp is meestal een aanduiding voor een compacte nederzetting, die kan variëren in grootte, maar meestal groter is dan een buurtschap en kleiner dan een stad en die zich duidelijk onderscheidt van het landelijke gebied eromheen. + Die Unterscheidung zwischen Dorf, Kleinstadt und Stadt ist immer relativ und schwankt entsprechend ihrer individuellen, regionalen Kontexte. Ein Dorf bezeichnet im Allgemeinen eine geschlossene, in der Größe variierende Siedlung, ist aber größer als ein Weiler und kleiner als ein Städtchen und unterscheidet sich von der ländlichen Umgebung. + As distinções entre aldeias, vilas e cidades são relativas e variam de acordo com seus contextos regionais individuais. As vilas geralmente designam unidades de assentamento compacto, variando em tamanho, mas geralmente maiores do que aldeias e menores do que cidades e distintas do território rural circundante. + villages + dorpen + Dorf + vilas + aldeas + + + archaeological site + archeological sites + sites, archaeological + sites, archeological + archeologische site + archeologische vindplaatsen + sítio arqueológico + sitio arqueológico + yacimiento arqueológico + + Locations where human activities once took place and some form of material evidence has been left behind, particularly sites where evidence of past activity is being or has been investigated using the discipline of archaeology. + Designa un lugar con evidencias de actividad humana. + Locaties waar ooit menselijke activiteiten hebben plaatsgevonden en waar materiële resten, in wat voor vorm dan ook, zijn achtergebleven. 
+ Locais onde as atividades humanas ocorreram e alguma forma de evidência material foi deixada para trás, particularmente locais onde evidências de atividades anteriores estão sendo ou foram investigadas usando a disciplina de arqueologia. + archaeological sites + archeologische sites + sítios arqueológicos + sitios arqueológicos + + + + + + + + + + + + + + + + Ort, an dem Überreste der Vergangenheit erhalten geblieben sind + Tietyllä paikalla kiinteästi sijaitsevien jäännösten muodostama kokonaisuus + Plats där fornlämning påträffats + Объект материальной культуры, несущий в себе определённый объём информации о прошлом + Local onde se concentram vestígios arquelógicos + Τοποθεσία στην οποία βρίσκονται ενδείξεις ανθρώπινης δραστηριότητας του παρελθόντος + Place (or group of physical sites) in which evidence of past activity is preserved + Mjesto na kojemu postoji veća količina sačuvanih izrađevina i tvorevina iz prošlosti + Luogo in cui si conservano tracce dell'attività umana del passato + Lieu ou groupe de sites physiques où sont préservées des preuves de l'activité préhistorique, historique ou contemporaine + Lugar donde se concentran vestigios arqueológicos + Místo nebo skupina míst, kde jsou zachovány důkazy a pozůstatky historické aktivity + Historiaurreko, historiako edo gaur egungo jardueraren froga materialak gordetzen dituen leku edo gune fisikoen multzoa + Lloc on es conserven vestigis arqueològics + Plaats waar men archeologische vondsten heeft gedaan + Archäologische Stätte + Археологическое место + Arkeologinen kohde + Sítio arqueológico + Археологически обект + Archeologinė vieta + Arheoloģiskais piemineklis + Arheološko nalazište + Site archéologique + Régészeti lelőhely + Archeologická lokalita + Arheološko najdišče + Suíomh seandálaíochta + Jaciment arqueològic + Arkeologisk lokal + Αρχαιολογική θέση + Archaeological site + Sito archeologico + Yacimiento arqueológico + Muistis + Arkeologia-aztarnategi + Archeologická lokalita + Stanowisko 
archeologiczne + Sit arheologic + Arkæologisk område + Archeologische vindplaats + + + + + CMC - CMC Associates Ltd. + + + + + + + + + CMC_HA/22558 + uz + + true + + + + + + Interactive quicktime panorama of link path 1-5.Skara Brae is an archaeological site with + exceptionally well preserved remains that show the stone built furniture and internal structure of ten Neolithic houses and + their narrow connecting passageways. Radio Carbon dates show that the village was occupied for around 600 years between 3200 + and 2200 BC. There appear to be two main structural phases to the occupation.Between the later houses at Skara Brae is a + network of low winding passages. These made it possible to travel between Houses 1-7 without going outside. The passages are + around a meter high and roofed with stone slabs. The main passage has bar-holes in the walls at either end suggesting that + it could have been sealed from the inside.There were further bar-holes at the entrances to each of the houses allowing them + to be closed and barred with stone slabs.The monument is now managed by Historic Scotland and is a key element of the Heart + of Neolithic Orkney World Heritage Site. 
+ mov + http://3dicons.dcu.gr/object/HA/22558 + Uzbek + uzb + + CMC Associates + CMC + + + Settlement + Panorama Movie of link path 1-5, Skara Brae + Movie + 3D ICONS + + Neolithic + + false + + + VIDEO + + + + Europeana Foundation + Europeana Foundation + 321_local_10092024_1424 + Netherlands + nl + 10 + + + CARARE + + + + + + + + diff --git a/metis-media-service/src/test/resources/__files/test_oembed.json b/metis-media-service/src/test/resources/__files/test_oembed.json new file mode 100644 index 0000000000..05ef49c3f3 --- /dev/null +++ b/metis-media-service/src/test/resources/__files/test_oembed.json @@ -0,0 +1,23 @@ +{ + "type": "video", + "version": "1.0", + "provider_name": "Vimeo", + "provider_url": "https://vimeo.com/", + "title": "Mountain Water", + "author_name": "RhythmRancher", + "author_url": "https://vimeo.com/user8752833", + "is_plus": "0", + "account_type": "basic", + "html": "", + "width": 640, + "height": 360, + "duration": 199, + "description": "Mountain Water ~The snow is melting here in the Marble Mountain Wilderness zone and here is what it looks like when the water comes rushing down from the mountains into the valley. 
The look is enchanting and the sound is sublime :-) hope you will enjoy this video and feel the closeness to nature :-) wishing you many positive vibrations ~ peace ~\n\nPlease check my band page out @ rhythmrancher.com ~ studio gear 4 sale :)\nand also please visit my business page to find out about having healthy drinking water @ multipureusa.com/willandgemma/", + "thumbnail_url": "https://i.vimeocdn.com/video/298084091-2ceb89d13a3e6a4f786238488c4b16d598b89d6e7245cd2e24b6dbb1d6c326a3-d_640", + "thumbnail_width": 640, + "thumbnail_height": 360, + "thumbnail_url_with_play_button": "https://i.vimeocdn.com/filter/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F298084091-2ceb89d13a3e6a4f786238488c4b16d598b89d6e7245cd2e24b6dbb1d6c326a3-d_640&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png", + "upload_date": "2012-05-27 20:24:19", + "video_id": 42947250, + "uri": "/videos/42947250" +} diff --git a/metis-pattern-analysis/pom.xml b/metis-pattern-analysis/pom.xml index 2de3080547..44ce2a1131 100644 --- a/metis-pattern-analysis/pom.xml +++ b/metis-pattern-analysis/pom.xml @@ -3,7 +3,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 4.0.0 metis-pattern-analysis diff --git a/metis-pattern-analysis/src/test/java/eu/europeana/patternanalysis/ProblemPatternAnalyzerTest.java b/metis-pattern-analysis/src/test/java/eu/europeana/patternanalysis/ProblemPatternAnalyzerTest.java index 1d84a4189a..9aa6783ea6 100644 --- a/metis-pattern-analysis/src/test/java/eu/europeana/patternanalysis/ProblemPatternAnalyzerTest.java +++ b/metis-pattern-analysis/src/test/java/eu/europeana/patternanalysis/ProblemPatternAnalyzerTest.java @@ -74,7 +74,7 @@ private int getRequestedProblemOccurrencesSize(ProblemPatternDescription problem return problemPatterns.stream() .filter(problemPattern -> problemPattern.getProblemPatternDescription() == problemPatternDescription) - .map(problemPattern -> problemPattern.getRecordAnalysisList().get(0).getProblemOccurrenceList().size()) + 
.map(problemPattern -> problemPattern.getRecordAnalysisList().getFirst().getProblemOccurrenceList().size()) .findFirst().orElse(0); } diff --git a/metis-repository/metis-repository-rest/pom.xml b/metis-repository/metis-repository-rest/pom.xml index c76bc7bc23..a5ce5cf86c 100644 --- a/metis-repository/metis-repository-rest/pom.xml +++ b/metis-repository/metis-repository-rest/pom.xml @@ -4,7 +4,7 @@ metis-repository eu.europeana.metis - 12.2 + 13 4.0.0 metis-repository-rest diff --git a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/HttpHarvestController.java b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/HttpHarvestController.java index a124e279a0..cbabbe216a 100644 --- a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/HttpHarvestController.java +++ b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/HttpHarvestController.java @@ -22,7 +22,6 @@ import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.server.ResponseStatusException; @@ -53,7 +52,6 @@ public void setRecordDao(RecordDao recordDao) { */ @GetMapping(value = RestEndpoints.REPOSITORY_HTTP_ENDPOINT_ZIP, produces = "application/zip") @ResponseStatus(HttpStatus.OK) - @ResponseBody @ApiOperation(value = "The dataset is exported as a zip file for harvesting by Metis. 
Records " + "that are marked as deleted will be excluded from the resulting zip file.") @ApiResponses(value = {@ApiResponse(code = 404, message = "No records for this dataset."), diff --git a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/OaiPmhController.java b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/OaiPmhController.java index 15e0938284..f87cac5f46 100644 --- a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/OaiPmhController.java +++ b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/OaiPmhController.java @@ -61,6 +61,15 @@ public void setRecordDao(RecordDao recordDao) { this.recordDao = recordDao; } + /** + * Request an OAI-PMH ListIdentifiers or GetRecord given the required parameters. + * + * @param verb the verb, can be ListIdentifiers or GetRecord + * @param set the set + * @param metadataPrefix the metadata prefix + * @param identifier the identifier in case of GetRecord verb + * @return the response + */ @GetMapping(value = RestEndpoints.REPOSITORY_OAI_ENDPOINT, produces = {MediaType.APPLICATION_XML_VALUE}) @ResponseStatus(HttpStatus.OK) diff --git a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/RecordController.java b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/RecordController.java index ca30985273..66b2810fb8 100644 --- a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/RecordController.java +++ b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/controller/RecordController.java @@ -1,6 +1,7 @@ package eu.europeana.metis.repository.rest.controller; import eu.europeana.metis.harvesting.HarvesterException; +import 
eu.europeana.metis.harvesting.ReportingIteration.IterationResult; import eu.europeana.metis.harvesting.http.HttpHarvesterImpl; import eu.europeana.metis.repository.rest.dao.Record; import eu.europeana.metis.repository.rest.dao.RecordDao; @@ -54,8 +55,9 @@ public class RecordController { public static final String CONTROLLER_TAG_NAME = "RecordController"; private static final Logger LOGGER = LoggerFactory.getLogger(RecordController.class); - private static final Pattern UNSUPPORTED_CHARACTERS_PATTERN = Pattern.compile("[^a-zA-Z0-9_]"); + private static final Pattern UNSUPPORTED_CHARACTERS_PATTERN = Pattern.compile("\\W"); private static final String REPLACEMENT_CHARACTER = "_"; + public static final String NOT_FOUND_LOG_STRING = "No record found for this identifier."; private RecordDao recordDao; @@ -70,6 +72,7 @@ public void setRecordDao(RecordDao recordDao) { * @param recordId - A unique record id * @param datasetId - The id of the dataset which the record belongs to * @param dateStamp - Last time the record was updated. It can also be the date of creation + * @param markAsDeleted Mark record as deleted * @param edmRecord - The record itself * @return a summary of the performed actions. 
*/ @@ -126,10 +129,14 @@ public InsertionResult saveRecords( final InsertionResult result = new InsertionResult(datasetId, Objects.requireNonNullElseGet(dateStamp, Instant::now)); try (final InputStream inputStream = recordsZipFile.getInputStream()) { - new HttpHarvesterImpl().harvestRecords(inputStream, CompressedFileExtension.ZIP, entry -> { - final byte[] content = entry.getEntryContent().readAllBytes(); - final String recordId = datasetId + "_" + FilenameUtils.getBaseName(entry.getEntryName()); + new HttpHarvesterImpl().harvestFullRecords(inputStream, CompressedFileExtension.ZIP, entry -> { + final byte[] content; + try (InputStream contentStream = entry.getContent()) { + content = contentStream.readAllBytes(); + } + final String recordId = datasetId + "_" + FilenameUtils.getBaseName(entry.getHarvestingIdentifier()); saveRecord(recordId, new String(content, StandardCharsets.UTF_8), result, false); + return IterationResult.CONTINUE; }); } catch (IOException | HarvesterException | RuntimeException e) { @@ -143,9 +150,10 @@ public InsertionResult saveRecords( /** * Update record header (metadata information) of the record given by the record ID. * - * @param recordId - A unique record id - * @param datasetId - The id of the dataset which the record belongs to - * @param dateStamp - Last time the record was updated. It can also be the date of creation + * @param recordId A unique record id + * @param datasetId The id of the dataset which the record belongs to + * @param dateStamp Last time the record was updated. It can also be the date of creation + * @param markAsDeleted Mark record as deleted * @return a summary of the performed actions. 
*/ @PutMapping(value = RestEndpoints.REPOSITORY_RECORDS_RECORD_ID_HEADER, @@ -162,7 +170,7 @@ public InsertionResult updateRecordHeader( @ApiParam(value = "Whether the record is to be marked as deleted", required = true) @RequestParam("markAsDeleted") boolean markAsDeleted) { final Record oaiRecord = recordDao.getRecord(recordId); if (oaiRecord == null) { - throw new ResponseStatusException(HttpStatus.NOT_FOUND, "No record found for this identifier."); + throw new ResponseStatusException(HttpStatus.NOT_FOUND, NOT_FOUND_LOG_STRING); } return saveRecord(recordId, datasetId, dateStamp, markAsDeleted, oaiRecord.getEdmRecord()); } @@ -186,6 +194,11 @@ private void saveRecord(String providedRecordId, String edmRecord, InsertionResu } } + /** + * Get a record from the database using an identifier. + * @param recordId the record identifier + * @return the record + */ @GetMapping(value = RestEndpoints.REPOSITORY_RECORDS_RECORD_ID, produces = {MediaType.APPLICATION_XML_VALUE}) @ResponseStatus(HttpStatus.OK) @@ -196,12 +209,16 @@ public RecordView getRecord( @ApiParam(value = "Record ID", required = true) @PathVariable("recordId") String recordId) { final Record oaiRecord = recordDao.getRecord(recordId); if (oaiRecord == null) { - throw new ResponseStatusException(HttpStatus.NOT_FOUND, "No record found for this identifier."); + throw new ResponseStatusException(HttpStatus.NOT_FOUND, NOT_FOUND_LOG_STRING); } return new RecordView(oaiRecord.getRecordId(), oaiRecord.getDatasetId(), oaiRecord.getDateStamp(), oaiRecord.isDeleted(), oaiRecord.getEdmRecord()); } + /** + * Delete a record from the database given a record identifier. + * @param recordId the record identifier + */ @DeleteMapping(value = RestEndpoints.REPOSITORY_RECORDS_RECORD_ID) @ResponseStatus(HttpStatus.OK) @ApiOperation(value = "The record is deleted from the database. 
Note: this is not the same as " @@ -211,7 +228,7 @@ public RecordView getRecord( public void deleteRecord( @ApiParam(value = "Record ID", required = true) @PathVariable("recordId") String recordId) { if (!recordDao.deleteRecord(recordId)) { - throw new ResponseStatusException(HttpStatus.NOT_FOUND, "No record found for this identifier."); + throw new ResponseStatusException(HttpStatus.NOT_FOUND, NOT_FOUND_LOG_STRING); } } diff --git a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/dao/RecordDao.java b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/dao/RecordDao.java index 80dc024503..54dc7ab0f8 100644 --- a/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/dao/RecordDao.java +++ b/metis-repository/metis-repository-rest/src/main/java/eu/europeana/metis/repository/rest/dao/RecordDao.java @@ -1,6 +1,6 @@ package eu.europeana.metis.repository.rest.dao; -import static eu.europeana.metis.utils.CommonStringValues.CRLF_PATTERN; +import static eu.europeana.metis.utils.CommonStringValues.sanitizeCRLF; import com.mongodb.client.MongoClient; import dev.morphia.Datastore; @@ -57,8 +57,7 @@ public boolean createRecord(Record providedRecord) { ExternalRequestUtil.retryableExternalRequestForNetworkExceptions( () -> datastore.save(providedRecord)); if (LOGGER.isInfoEnabled()) { - LOGGER.info("Record for datasetId '{}' created in Mongo", - CRLF_PATTERN.matcher(providedRecord.getDatasetId()).replaceAll("")); + LOGGER.info("Record for datasetId '{}' created in Mongo", sanitizeCRLF(providedRecord.getDatasetId())); } return recordFound.isEmpty(); @@ -90,7 +89,7 @@ public Record getRecord(String recordId) { return recordFound.get(); } else { if (LOGGER.isWarnEnabled()) { - LOGGER.warn("There is no such record with id {}.", CRLF_PATTERN.matcher(recordId).replaceAll("")); + LOGGER.warn("There is no such record with id {}.", sanitizeCRLF(recordId)); } return null; } @@ 
-106,7 +105,7 @@ public boolean deleteRecord(String recordId) { final boolean isDeleted = datastore.find(Record.class) .filter(Filters.eq(RECORD_ID_FIELD, recordId)).delete().getDeletedCount() > 0; if (!isDeleted) { - LOGGER.warn("There is no such record with id {}.", recordId); + LOGGER.warn("There is no such record with id {}.", sanitizeCRLF(recordId)); } return isDeleted; } diff --git a/metis-repository/pom.xml b/metis-repository/pom.xml index 8a9858619b..d7458b6b13 100644 --- a/metis-repository/pom.xml +++ b/metis-repository/pom.xml @@ -4,7 +4,7 @@ 4.0.0 eu.europeana.metis - 12.2 + 13 metis-framework diff --git a/metis-transformation/metis-transformation-service/pom.xml b/metis-transformation/metis-transformation-service/pom.xml index 4c77da77a1..13d4dd0a83 100644 --- a/metis-transformation/metis-transformation-service/pom.xml +++ b/metis-transformation/metis-transformation-service/pom.xml @@ -4,7 +4,7 @@ metis-transformation eu.europeana.metis - 12.2 + 13 metis-transformation-service diff --git a/metis-transformation/pom.xml b/metis-transformation/pom.xml index 5a38f40d2a..69d49b13f8 100644 --- a/metis-transformation/pom.xml +++ b/metis-transformation/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-transformation pom diff --git a/metis-validation/metis-validation-common/pom.xml b/metis-validation/metis-validation-common/pom.xml index d11118aad6..5b4d765bfb 100644 --- a/metis-validation/metis-validation-common/pom.xml +++ b/metis-validation/metis-validation-common/pom.xml @@ -4,7 +4,7 @@ metis-validation eu.europeana.metis - 12.2 + 13 metis-validation-common diff --git a/metis-validation/metis-validation-service/pom.xml b/metis-validation/metis-validation-service/pom.xml index 890239a399..7b060ef477 100644 --- a/metis-validation/metis-validation-service/pom.xml +++ b/metis-validation/metis-validation-service/pom.xml @@ -4,7 +4,7 @@ metis-validation eu.europeana.metis - 12.2 + 13 metis-validation-service diff --git 
a/metis-validation/metis-validation-service/src/main/java/eu/europeana/validation/service/EDMParser.java b/metis-validation/metis-validation-service/src/main/java/eu/europeana/validation/service/EDMParser.java index 0991d27b98..f6745b5865 100644 --- a/metis-validation/metis-validation-service/src/main/java/eu/europeana/validation/service/EDMParser.java +++ b/metis-validation/metis-validation-service/src/main/java/eu/europeana/validation/service/EDMParser.java @@ -1,6 +1,7 @@ package eu.europeana.validation.service; import java.io.IOException; +import java.io.Serial; import java.nio.file.Files; import java.nio.file.Paths; import java.util.concurrent.ConcurrentHashMap; @@ -23,7 +24,7 @@ */ final class EDMParser { - private static EDMParser p; + private static EDMParser edmParser; private static final ConcurrentMap CACHE = new ConcurrentHashMap<>(); private static final DocumentBuilderFactory PARSE_FACTORY = DocumentBuilderFactory.newInstance(); private static final Logger LOGGER = LoggerFactory.getLogger(EDMParser.class); @@ -61,40 +62,41 @@ public DocumentBuilder getEdmParser() throws EDMParseSetupException { /** * Get a JAXP schema validator (singleton) * - * @param path The path location of the schema. This has to be a sanitized input otherwise the - * method could become unsecure. + * @param path The path location of the schema. This has to be a sanitized input otherwise the method could become unsecure. * @param resolver the resolver used for the schema * @return JAXP schema validator. * @throws EDMParseSetupException In case the validator could not be created. */ public Validator getEdmValidator(String path, LSResourceResolver resolver) - throws EDMParseSetupException { - try { - // False positive. The parser has all security settings applied (see getSchema). 
- @SuppressWarnings("squid:S2755") - final Validator result = getSchema(path, resolver).newValidator(); - return result; - } catch (SAXException | IOException e) { - throw new EDMParseSetupException("Unable to create validator", e); - } + throws EDMParseSetupException { + // False positive. The parser has all security settings applied (see getSchema). + @SuppressWarnings("squid:S2755") + final Validator result = getSchema(path, resolver).newValidator(); + return result; } - private Schema getSchema(String path, LSResourceResolver resolver) - throws SAXException, IOException { - - if (!CACHE.containsKey(path)) { - SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); - factory.setResourceResolver(resolver); - factory.setFeature("http://apache.org/xml/features/validation/schema-full-checking", - false); - factory.setFeature("http://apache.org/xml/features/honour-all-schemaLocations", true); - //Protection from XXE - factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); - factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); - Schema schema = factory.newSchema(new StreamSource(Files.newInputStream(Paths.get(path)))); - CACHE.put(path, schema); + private Schema getSchema(String path, LSResourceResolver resolver) throws EDMParseSetupException { + + final Schema schema; + try { + schema = CACHE.computeIfAbsent(path, s -> { + try { + SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); + factory.setResourceResolver(resolver); + factory.setFeature("http://apache.org/xml/features/validation/schema-full-checking", false); + factory.setFeature("http://apache.org/xml/features/honour-all-schemaLocations", true); + // Protection from XXE + factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + return factory.newSchema(new StreamSource(Files.newInputStream(Paths.get(s)))); + } 
catch (SAXException | IOException e) { + throw new RuntimeEDMParseSetupException("Failed to create schema for path: " + s, e); + } + }); + } catch (RuntimeEDMParseSetupException e) { + throw new EDMParseSetupException(e.getMessage(), e); } - return CACHE.get(path); + return schema; } /** @@ -104,10 +106,19 @@ private Schema getSchema(String path, LSResourceResolver resolver) */ public static EDMParser getInstance() { synchronized (EDMParser.class) { - if (p == null) { - p = new EDMParser(); + if (edmParser == null) { + edmParser = new EDMParser(); } - return p; + return edmParser; + } + } + + private static class RuntimeEDMParseSetupException extends RuntimeException { + + @Serial private static final long serialVersionUID = 6802348788522122630L; + + RuntimeEDMParseSetupException(String message, Throwable cause) { + super(message, cause); } } @@ -116,7 +127,7 @@ public static EDMParser getInstance() { */ public static class EDMParseSetupException extends Exception { - private static final long serialVersionUID = 3854029647081914787L; + @Serial private static final long serialVersionUID = 3854029647081914787L; EDMParseSetupException(String message, Throwable cause) { super(message, cause); diff --git a/metis-validation/metis-validation-service/src/test/java/TestApplication.java b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestApplication.java similarity index 81% rename from metis-validation/metis-validation-service/src/test/java/TestApplication.java rename to metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestApplication.java index 52db600d17..962673b1df 100644 --- a/metis-validation/metis-validation-service/src/test/java/TestApplication.java +++ b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestApplication.java @@ -1,17 +1,11 @@ +package eu.europeana.validation.service; + import eu.europeana.metis.network.NetworkUtil; -import 
eu.europeana.validation.service.ClasspathResourceResolver; -import eu.europeana.validation.service.PredefinedSchemas; -import eu.europeana.validation.service.SchemaProvider; -import eu.europeana.validation.service.ValidationExecutionService; -import eu.europeana.validation.service.ValidationServiceConfig; import java.io.IOException; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -/** - * Created by ymamakis on 7/14/16. - */ @Configuration public class TestApplication { diff --git a/metis-validation/metis-validation-service/src/test/java/TestSchemaProvider.java b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestSchemaProvider.java similarity index 95% rename from metis-validation/metis-validation-service/src/test/java/TestSchemaProvider.java rename to metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestSchemaProvider.java index 2ffa8c3ad1..9705e99cf4 100644 --- a/metis-validation/metis-validation-service/src/test/java/TestSchemaProvider.java +++ b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestSchemaProvider.java @@ -1,3 +1,5 @@ +package eu.europeana.validation.service; + import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.get; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; @@ -11,9 +13,6 @@ import com.github.tomakehurst.wiremock.WireMockServer; import eu.europeana.metis.network.NetworkUtil; import eu.europeana.validation.model.Schema; -import eu.europeana.validation.service.PredefinedSchemas; -import eu.europeana.validation.service.SchemaProvider; -import eu.europeana.validation.service.SchemaProviderException; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; @@ -21,9 
+20,6 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -/** - * Created by pwozniak on 12/21/17 - */ class TestSchemaProvider { private static int portForWireMock = 9999; @@ -187,8 +183,8 @@ void zipFileShouldBeDownloadedWhenNotAvailable() } private void clearSchemasDir() throws IOException { - String TMP_DIR = System.getProperty("java.io.tmpdir"); - File schemasDirectory = new File(TMP_DIR, "schemas"); + String tmpDir = System.getProperty("java.io.tmpdir"); + File schemasDirectory = new File(tmpDir, "schemas"); FileUtils.deleteDirectory(schemasDirectory); schemasDirectory.mkdirs(); } diff --git a/metis-validation/metis-validation-service/src/test/java/TestValidationExecution.java b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestValidationExecution.java similarity index 95% rename from metis-validation/metis-validation-service/src/test/java/TestValidationExecution.java rename to metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestValidationExecution.java index 85f045c1a4..449250647b 100644 --- a/metis-validation/metis-validation-service/src/test/java/TestValidationExecution.java +++ b/metis-validation/metis-validation-service/src/test/java/eu/europeana/validation/service/TestValidationExecution.java @@ -1,3 +1,5 @@ +package eu.europeana.validation.service; + import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.get; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; @@ -11,10 +13,6 @@ import com.github.tomakehurst.wiremock.WireMockServer; import eu.europeana.validation.model.ValidationResult; import eu.europeana.validation.model.ValidationResultList; -import eu.europeana.validation.service.ClasspathResourceResolver; -import eu.europeana.validation.service.PredefinedSchemas; -import eu.europeana.validation.service.SchemaProvider; -import 
eu.europeana.validation.service.ValidationExecutionService; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; @@ -40,9 +38,6 @@ import org.springframework.test.context.support.AnnotationConfigContextLoader; import org.springframework.test.context.web.WebAppConfiguration; -/** - * Created by gmamakis on 18-12-15. - */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = TestApplication.class, loader = AnnotationConfigContextLoader.class) @WebAppConfiguration @@ -288,12 +283,12 @@ private Properties loadDefaultProperties(String propertyFile) { @Test void ValidationExecutionServiceTestWithProvidedProperties() { Properties property = loadDefaultProperties("src/test/resources/custom-validation.properties"); - ValidationExecutionService validationExecutionService = new ValidationExecutionService( + ValidationExecutionService validationExecutionServiceWithProvidedProperties = new ValidationExecutionService( property); - ExecutorService es = Whitebox.getInternalState(validationExecutionService, "es"); + ExecutorService es = Whitebox.getInternalState(validationExecutionServiceWithProvidedProperties, "es"); assertNotNull(es); SchemaProvider schemaProvider = Whitebox - .getInternalState(validationExecutionService, "schemaProvider"); + .getInternalState(validationExecutionServiceWithProvidedProperties, "schemaProvider"); Properties properties = loadDefaultProperties( "src/test/resources/custom-validation.properties"); assertNotNull(schemaProvider); @@ -316,12 +311,12 @@ void ValidationExecutionServiceTestWithCustomConfiguration() { PredefinedSchemas predefinedSchemas = new PredefinedSchemas(); predefinedSchemas.add("name", "location", "root", "schematronFile"); predefinedSchemas.add("name1", "location1", "root1", "schematronFile1"); - ValidationExecutionService validationExecutionService = new ValidationExecutionService( + ValidationExecutionService validationExecutionServiceWithCustomConfig = new 
ValidationExecutionService( () -> 12, new ClasspathResourceResolver(), new SchemaProvider(predefinedSchemas)); - ExecutorService es = Whitebox.getInternalState(validationExecutionService, "es"); + ExecutorService es = Whitebox.getInternalState(validationExecutionServiceWithCustomConfig, "es"); assertNotNull(es); SchemaProvider schemaProvider = Whitebox - .getInternalState(validationExecutionService, "schemaProvider"); + .getInternalState(validationExecutionServiceWithCustomConfig, "schemaProvider"); assertNotNull(schemaProvider); PredefinedSchemas locations = Whitebox .getInternalState(schemaProvider, "predefinedSchemasLocations"); diff --git a/metis-validation/pom.xml b/metis-validation/pom.xml index 8682db2574..d0ddb6ea1e 100644 --- a/metis-validation/pom.xml +++ b/metis-validation/pom.xml @@ -4,7 +4,7 @@ metis-framework eu.europeana.metis - 12.2 + 13 metis-validation pom diff --git a/pom.xml b/pom.xml index 66c4ab2b5c..d561793803 100644 --- a/pom.xml +++ b/pom.xml @@ -5,12 +5,11 @@ eu.europeana.metis metis-framework - 12.2 + 13 pom metis-common - metis-core metis-indexing metis-dereference metis-validation @@ -21,9 +20,10 @@ metis-harvesting metis-repository metis-pattern-analysis + metis-debias - + scm:git:https://github.com/europeana/metis-framework https://github.com/europeana/metis-framework @@ -55,50 +55,6 @@ 21 - - - directory-maven-plugin - - - - main.basedir - - - highest-basedir - - directories - initialize - - - org.commonjava.maven.plugins - ${version.directory.maven.plugin} - - - - build-helper-maven-plugin - - - - - - ${main.basedir}/metis-schema/target/eu/europeana/metis/schema/jibx - - - - add-source - - add-source - generate-sources - - - org.codehaus.mojo - ${version.build.helper.maven.plugin} - @@ -121,13 +77,12 @@ UTF-8 - 12-SNAPSHOT - 10-SNAPSHOT + 10 2.9.0 5.3.1 1.4 - 2.9.0 + 2.14.0 3.12.0 4.4 4.2.0 @@ -136,13 +91,9 @@ 1.16.1 1.10.0 1.26.0 - 2.16.8 - - - 1.0 + 2.16.10-SNAPSHOT 5.2.0 - - 4.12.2 + 4.17.0 1.3 6.4.4.Final 42.7.2 @@ -172,19 
+123,17 @@ 2.4.5 0.9 2.0.9 - 5.12.0 - 3.24.3 10.5 4.0.1 8.8.2 - 6.1.4 + 6.1.5 3.2.3 5.8.10 2.22.1 2.3.0 3.0.0 1.6.2 - 1.26 + 2.9.2 3.4.2 1.19.6 2.12.2 @@ -460,6 +409,10 @@ org.apache.logging.log4j * + + com.fasterxml.jackson.module + jackson-module-jaxb-annotations + @@ -634,7 +587,6 @@ slf4j-api ${version.slf4j} -