From 24173d7a0bb7a566d0df26325e8643bdbb0c59da Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 18 Dec 2023 15:46:36 +0100 Subject: [PATCH] continuous validation WIP --- .../oaf/utils/GraphCleaningFunctions.java | 4 +- .../java/eu/dnetlib/pace/util/UtilTest.java | 4 +- .../score/deserializers/BipProjectModel.java | 55 ++++- .../dhp-continuous-validation/pom.xml | 192 +----------------- .../ContinuousValidator.java | 89 ++------ .../utils/ParquetUtils.java | 101 --------- .../oozie_app/workflow.xml | 8 +- .../src/test/java/ReadParquetDataTest.java | 33 --- .../src/test/java/ReadResultsTest.java | 13 +- .../doiboost/orcid/OrcidClientTest.java | 11 +- pom.xml | 6 + 11 files changed, 101 insertions(+), 415 deletions(-) delete mode 100644 dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/utils/ParquetUtils.java delete mode 100644 dhp-workflows/dhp-continuous-validation/src/test/java/ReadParquetDataTest.java diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java index 8700ea527..0124e96fc 100644 --- a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java @@ -689,7 +689,9 @@ public class GraphCleaningFunctions extends CleaningFunctions { .filter(Objects::nonNull) .filter(p -> Objects.nonNull(p.getQualifier())) .filter(p -> StringUtils.isNotBlank(p.getValue())) - .filter(p -> StringUtils.contains(StringUtils.lowerCase(p.getQualifier().getClassid()), ORCID)) + .filter( + p -> StringUtils + .contains(StringUtils.lowerCase(p.getQualifier().getClassid()), ORCID)) .map(p -> { // hack to distinguish orcid from orcid_pending String pidProvenance = getProvenance(p.getDataInfo()); diff --git a/dhp-pace-core/src/test/java/eu/dnetlib/pace/util/UtilTest.java b/dhp-pace-core/src/test/java/eu/dnetlib/pace/util/UtilTest.java index 6056c342d..93db552c1 100644 --- a/dhp-pace-core/src/test/java/eu/dnetlib/pace/util/UtilTest.java +++ b/dhp-pace-core/src/test/java/eu/dnetlib/pace/util/UtilTest.java @@ -7,10 +7,10 @@ import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import eu.dnetlib.pace.model.Person; -import jdk.nashorn.internal.ir.annotations.Ignore; public class UtilTest { @@ -22,7 +22,7 @@ public class UtilTest { } @Test - @Ignore + @Disabled public void paceResolverTest() { PaceResolver paceResolver = new PaceResolver(); paceResolver.getComparator("keywordMatch", params); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipmodel/score/deserializers/BipProjectModel.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipmodel/score/deserializers/BipProjectModel.java index 680e12504..944a5281e 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipmodel/score/deserializers/BipProjectModel.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipmodel/score/deserializers/BipProjectModel.java @@ -19,10 +19,7 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -@NoArgsConstructor -@AllArgsConstructor -@Getter -@Setter + public class BipProjectModel { String projectId; @@ -34,6 +31,17 @@ public class BipProjectModel { String totalCitationCount; + public 
BipProjectModel() {
+	}
+
+	public BipProjectModel(String projectId, String numOfInfluentialResults, String numOfPopularResults, String totalImpulse, String totalCitationCount) {
+		this.projectId = projectId;
+		this.numOfInfluentialResults = numOfInfluentialResults;
+		this.numOfPopularResults = numOfPopularResults;
+		this.totalImpulse = totalImpulse;
+		this.totalCitationCount = totalCitationCount;
+	}
+
 	// each project bip measure has exactly one value, hence one key-value pair
 	private Measure createMeasure(String measureId, String measureValue) {
 
@@ -71,4 +79,43 @@
 			createMeasure("totalCitationCount", totalCitationCount));
 	}
 
+	public String getProjectId() {
+		return projectId;
+	}
+
+	public void setProjectId(String projectId) {
+		this.projectId = projectId;
+	}
+
+	public String getNumOfInfluentialResults() {
+		return numOfInfluentialResults;
+	}
+
+	public void setNumOfInfluentialResults(String numOfInfluentialResults) {
+		this.numOfInfluentialResults = numOfInfluentialResults;
+	}
+
+	public String getNumOfPopularResults() {
+		return numOfPopularResults;
+	}
+
+	public void setNumOfPopularResults(String numOfPopularResults) {
+		this.numOfPopularResults = numOfPopularResults;
+	}
+
+	public String getTotalImpulse() {
+		return totalImpulse;
+	}
+
+	public void setTotalImpulse(String totalImpulse) {
+		this.totalImpulse = totalImpulse;
+	}
+
+	public String getTotalCitationCount() {
+		return totalCitationCount;
+	}
+
+	public void setTotalCitationCount(String totalCitationCount) {
+		this.totalCitationCount = totalCitationCount;
+	}
 }
diff --git a/dhp-workflows/dhp-continuous-validation/pom.xml b/dhp-workflows/dhp-continuous-validation/pom.xml
index 85154848a..1a834e146 100644
--- a/dhp-workflows/dhp-continuous-validation/pom.xml
+++ b/dhp-workflows/dhp-continuous-validation/pom.xml
@@ -14,11 +14,7 @@
 	<artifactId>dhp-continuous-validation</artifactId>
 
-	<properties>
-		<maven.compiler.source>8</maven.compiler.source>
-		<maven.compiler.target>8</maven.compiler.target>
-		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-	</properties>
+
 
@@ -29,40 +25,11 @@
 		<dependency>
 			<groupId>eu.dnetlib</groupId>
 			<artifactId>uoa-validator-engine2</artifactId>
-			<version>0.9.0</version>
 		</dependency>
 
 		<dependency>
 			<groupId>org.apache.spark</groupId>
 			<artifactId>spark-core_${scala.binary.version}</artifactId>
-			<version>${dhp.spark.version}</version>
-			<scope>provided</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>ch.qos.logback</groupId>
-					<artifactId>logback-classic</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.logging.log4j</groupId>
-					<artifactId>log4j-api</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-log4j12</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-api</artifactId>
-				</exclusion>
-			</exclusions>
 		</dependency>
@@ -70,181 +37,28 @@
 		<dependency>
 			<groupId>org.apache.spark</groupId>
 			<artifactId>spark-sql_${scala.binary.version}</artifactId>
-			<version>${dhp.spark.version}</version>
-			<scope>provided</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>com.twitter</groupId>
-					<artifactId>parquet-format</artifactId>
-				</exclusion>
-			</exclusions>
 		</dependency>
 
+		<!--
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-common</artifactId>
-			<version>${dhp.hadoop.version}</version>
-			<scope>provided</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>org.apache.parquet</groupId>
-					<artifactId>parquet-avro</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.avro</groupId>
-					<artifactId>avro</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-api</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-reload4j</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>ch.qos.reload4j</groupId>
-					<artifactId>reload4j</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.google.protobuf</groupId>
-					<artifactId>protobuf-java</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.codehaus.jackson</groupId>
-					<artifactId>jackson-core-asl</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.codehaus.jackson</groupId>
-					<artifactId>jackson-mapper-asl</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.fasterxml.woodstox</groupId>
-					<artifactId>woodstox-core</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-mapreduce-client-app</artifactId>
-			<version>${dhp.hadoop.version}</version>
-			<scope>provided</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>org.apache.avro</groupId>
-					<artifactId>avro</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-api</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.slf4j</groupId>
-					<artifactId>slf4j-log4j12</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.google.protobuf</groupId>
-					<artifactId>protobuf-java</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-
 		<dependency>
 			<groupId>org.apache.thrift</groupId>
 			<artifactId>libthrift</artifactId>
-			<version>0.17.0</version>
 		</dependency>
 		<dependency>
 			<groupId>com.fasterxml.woodstox</groupId>
 			<artifactId>woodstox-core</artifactId>
-			<version>6.5.1</version>
-		</dependency>
+-->
 		<dependency>
 			<groupId>com.google.code.gson</groupId>
 			<artifactId>gson</artifactId>
-			<version>${google.gson.version}</version>
 		</dependency>
 
-		<dependency>
-			<groupId>ch.qos.logback</groupId>
-			<artifactId>logback-core</artifactId>
-			<version>1.2.13</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-api</artifactId>
-			<version>1.7.36</version>
-		</dependency>
-
-		<dependency>
-			<groupId>ch.qos.logback</groupId>
-			<artifactId>logback-classic</artifactId>
-			<version>1.2.13</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.junit.jupiter</groupId>
-			<artifactId>junit-jupiter-engine</artifactId>
-			<version>5.10.1</version>
-			<scope>test</scope>
-		</dependency>
 	</dependencies>
 
-	<repositories>
-		<repository>
-			<id>libs</id>
-			<url>file:///${project.basedir}/libs</url>
-		</repository>
-	</repositories>
 
 </project>
\ No newline at end of file
diff --git 
a/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/ContinuousValidator.java b/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/ContinuousValidator.java
index 55d4b904b..f1b02bc62 100644
--- a/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/ContinuousValidator.java
+++ b/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/ContinuousValidator.java
@@ -1,10 +1,13 @@
 
 package eu.dnetlib.dhp.continuous_validator;
 
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.validator2.validation.XMLApplicationProfile;
-import eu.dnetlib.validator2.validation.guideline.openaire.*;
-import eu.dnetlib.validator2.validation.utils.TestUtils;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -13,15 +16,13 @@
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SaveMode;
 import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.validator2.validation.XMLApplicationProfile;
+import eu.dnetlib.validator2.validation.guideline.openaire.*;
+import eu.dnetlib.validator2.validation.utils.TestUtils;
 import scala.Option;
 
-import java.nio.charset.StandardCharsets;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-
 public class ContinuousValidator {
 
 	public static final String TEST_FILES_V4_DIR = TestUtils.TEST_FILES_BASE_DIR + "openaireguidelinesV4/";
@@ -74,7 +75,8 @@
 
 		guidelines = parser.get("openaire_guidelines");
 		if (guidelines == null) {
-			logger.error("The \"openaire_guidelines\" was not retrieved from the parameters file: " + parametersFile);
+			logger
+				.error("The \"openaire_guidelines\" was not retrieved from the parameters file: " + parametersFile);
 			return;
 		}
 
@@ -135,17 +137,6 @@
 		String finalOutputPath = outputPath;
 
 		runWithSparkSession(conf, isSparkSessionManaged, spark -> {
-			Dataset<Row> parquetFileDF = spark.read().parquet(finalParquet_file_path);
-			parquetFileDF.show(5);
-
-			// Filter the results based on the XML-encoding and non-null id and body.
-			parquetFileDF = parquetFileDF
-				.filter(
-					parquetFileDF
-						.col("encoding")
-						.eqNullSafe("XML")
-						.and(parquetFileDF.col("id").isNotNull())
-						.and(parquetFileDF.col("body").isNotNull()));
 
 			// Use a new instance of Document Builder in each worker, as it is not thread-safe.
 			MapFunction<Row, XMLApplicationProfile.ValidationResult> validateMapFunction = row -> profile
 				.validate(
 					row.getAs("id").toString(),
 					TestUtils
 						.getDocumentBuilder()
 						.parse(IOUtils.toInputStream(row.getAs("body").toString(), StandardCharsets.UTF_8)));
 
-			Dataset<XMLApplicationProfile.ValidationResult> validationResultsDataset = parquetFileDF
-				.map(validateMapFunction, Encoders.bean(XMLApplicationProfile.ValidationResult.class));
-
-			if (logger.isTraceEnabled()) {
-				logger.trace("Showing a few validation-results.. just for checking");
-				validationResultsDataset.show(5);
-			}
-
-			// Write the results to json file immediately, without converting them to a list.
-			validationResultsDataset
+			spark
+				.read()
+				.parquet(finalParquet_file_path)
+				.filter("encoding = 'XML' and id is not null and body is not null")
+				.map(validateMapFunction, Encoders.bean(XMLApplicationProfile.ValidationResult.class))
 				.write()
 				.option("compression", "gzip")
 				.mode(SaveMode.Overwrite)
 				.json(finalOutputPath + RESULTS_FILE_NAME); // The filename should be the name of the input-file or the
-			// input-directory.
-
-			if (logger.isTraceEnabled()) {
-				List<XMLApplicationProfile.ValidationResult> validationResultsList = validationResultsDataset
-					.javaRDD()
-					.collect();
-
-				if (validationResultsList.isEmpty()) {
-					logger.error("The \"validationResultsList\" was empty!");
-					return;
-				}
-
-				validationResultsList.forEach(vr -> logger.trace(vr.id() + " | score:" + vr.score()));
-				for (XMLApplicationProfile.ValidationResult result : validationResultsList)
-					logger.trace(result.toString());
-			}
-
-			// TODO - REMOVE THIS WHEN THE WRITE FROM ABOVE IS OK
-			/*
-			 * try (BufferedWriter writer = Files .newBufferedWriter(Paths.get(outputPath + RESULTS_FILE),
-			 * StandardCharsets.UTF_8)) { writer.write(new Gson().toJson(validationResultsList)); } catch (Exception e)
-			 * { logger.error("Error when writing the \"validationResultsList\" as json into the results-file: " +
-			 * outputPath + RESULTS_FILE); return; }
-			 */
-
-			// TODO - Maybe the following section is not needed, when ran as an oozie workflow..
-			Option<String> uiWebUrl = spark.sparkContext().uiWebUrl();
-			if (uiWebUrl.isDefined()) {
-				logger
-					.info(
-						"Waiting 60 seconds, before shutdown, for the user to check the jobs' status at: "
-							+ uiWebUrl.get());
-				try {
-					Thread.sleep(60_000);
-				} catch (InterruptedException ignored) {
-				}
-			} else
-				logger.info("The \"uiWebUrl\" is not defined, in order to check the jobs' status. Shutting down..");
 		});
 	}
 }
diff --git a/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/utils/ParquetUtils.java b/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/utils/ParquetUtils.java
deleted file mode 100644
index e36d82a65..000000000
--- a/dhp-workflows/dhp-continuous-validation/src/main/java/eu/dnetlib/dhp/continuous_validator/utils/ParquetUtils.java
+++ /dev/null
@@ -1,101 +0,0 @@
-
-package eu.dnetlib.dhp.continuous_validator.utils;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.avro.generic.GenericRecord;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.parquet.avro.AvroParquetReader;
-import org.apache.parquet.hadoop.ParquetReader;
-import org.apache.parquet.hadoop.util.HadoopInputFile;
-import org.apache.parquet.io.InputFile;
-import org.slf4j.LoggerFactory;
-
-public class ParquetUtils {
-
-	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ParquetUtils.class);
-
-	private static final Configuration parquetConfig = new Configuration();
-
-	public static List<GenericRecord> getParquetRecords(String fullFilePath) {
-		InputFile inputFile;
-		try { // TODO - Verify that this will create any directories which do not exist in the provided path. Currently
-				// we create the directories beforehand.
-			inputFile = HadoopInputFile.fromPath(new Path(fullFilePath), parquetConfig);
-			// logger.trace("Created the parquet " + outputFile); // DEBUG!
-		} catch (Throwable e) { // The simple "Exception" may not be thrown here, but an "Error" may be thrown.
-								// "Throwable" catches EVERYTHING!
-			logger.error("", e);
-			return null;
-		}
-
-		List<GenericRecord> records = new ArrayList<>();
-		GenericRecord record;
-		try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord> builder(inputFile).build()) {
-			while ((record = reader.read()) != null) {
-				records.add(record);
-			}
-		} catch (Throwable e) { // The simple "Exception" may not be thrown here, but an "Error" may be thrown.
-								// "Throwable" catches EVERYTHING!
-			logger.error("Problem when creating the \"ParquetWriter\" object or when writing the records with it!", e);
-
-			// At some point, I got an "NoSuchMethodError", because of a problem in the AvroSchema file:
-			// (java.lang.NoSuchMethodError: org.apache.avro.Schema.getLogicalType()Lorg/apache/avro/LogicalType;).
-			// The error was with the schema: {"name": "date", "type" : ["null", {"type" : "long", "logicalType" :
-			// "timestamp-millis"}]},
-			return null;
-		}
-
-		return records; // It may be empty.
-	}
-
-	public static Map<String, String> getIdXmlMapFromParquetFile(String parquetFileFullPath) {
-		List<GenericRecord> recordList = ParquetUtils.getParquetRecords(parquetFileFullPath);
-		if (recordList == null)
-			return null; // The error is already logged.
-		else if (recordList.isEmpty()) {
-			logger.error("The parquet-file \"" + parquetFileFullPath + "\" had no records inside!");
-			return null;
-		}
-
-		Map<String, String> idXmlMap = new HashMap<>();
-
-		for (GenericRecord record : recordList) {
-			if (logger.isTraceEnabled())
-				logger.trace(record.toString());
-
-			Object id = record.get("id");
-			if (id == null)
-				continue;
-			String idStr = id.toString();
-
-			Object encoding = record.get("encoding");
-			if (encoding == null) {
-				logger.warn("Record with id = \"" + idStr + "\" does not provide the encoding for its body!");
-				continue;
-			}
-			String encodingStr = encoding.toString();
-			if (!encodingStr.equals("XML")) {
-				logger.warn("Record with id = \"" + idStr + "\" does not have XML encoding for its body!");
-				continue;
-			}
-
-			Object body = record.get("body");
-			if (body == null) {
-				logger.warn("Record with id = \"" + idStr + "\" does not have a body!");
-				continue;
-			}
-			String bodyStr = body.toString();
-
-			idXmlMap.put(idStr, bodyStr);
-			// logger.debug(idStr + " | " + idXmlMap.get(idStr));
-		}
-
-		return idXmlMap;
-	}
-
-}
diff --git a/dhp-workflows/dhp-continuous-validation/src/main/resources/eu/dnetlib/dhp/continuous_validator/oozie_app/workflow.xml b/dhp-workflows/dhp-continuous-validation/src/main/resources/eu/dnetlib/dhp/continuous_validator/oozie_app/workflow.xml
index 60c70bd14..7a99c76dc 100644
--- a/dhp-workflows/dhp-continuous-validation/src/main/resources/eu/dnetlib/dhp/continuous_validator/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-continuous-validation/src/main/resources/eu/dnetlib/dhp/continuous_validator/oozie_app/workflow.xml
@@ -67,10 +67,10 @@
 			<property>
 				<name>oozie.launcher.mapred.job.queue.name</name>
 				<value>${oozieLauncherQueueName}</value>
 			</property>
-
+
@@ -93,8 +93,8 @@
 			--executor-cores=${sparkExecutorCores}
 			--driver-memory=${sparkDriverMemory}
 			--conf spark.extraListeners=${spark2ExtraListeners}
-
+			--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+			--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 			--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			--conf spark.sql.shuffle.partitions=3840
diff --git a/dhp-workflows/dhp-continuous-validation/src/test/java/ReadParquetDataTest.java b/dhp-workflows/dhp-continuous-validation/src/test/java/ReadParquetDataTest.java
deleted file mode 100644
index 8c46777f0..000000000
--- a/dhp-workflows/dhp-continuous-validation/src/test/java/ReadParquetDataTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-import java.util.Map;
-
-import org.junit.jupiter.api.Test;
-import org.slf4j.LoggerFactory;
-
-import eu.dnetlib.dhp.continuous_validator.utils.ParquetUtils;
-import eu.dnetlib.validator2.validation.utils.TestUtils;
-
-public class ReadParquetDataTest {
-
-	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ReadParquetDataTest.class);
-
-	private static final String parquetFileFullPath = TestUtils.TEST_FILES_BASE_DIR
-		+ "part-00589-733117df-3822-4fce-bded-17289cc5959a-c000.snappy.parquet";
-
-	public static void main(String[] args) {
-
-		testParquetRead();
-	}
-
-	@Test
-	public static void testParquetRead() {
-		Map<String, String> idXmlMap = ParquetUtils.getIdXmlMapFromParquetFile(parquetFileFullPath);
-		if (idXmlMap == null) {
-			logger.error("Could not create the \"idXmlMap\" from parquet-file: " + parquetFileFullPath);
-			System.exit(99);
-		} else if (idXmlMap.isEmpty())
-			logger.warn("The generated \"idXmlMap\" was empty, for parquet-file: " + parquetFileFullPath);
-		else
-			logger.info("The \"idXmlMap\" was successfully generated, for parquet-file: " + parquetFileFullPath);
-	}
-
-}
diff --git a/dhp-workflows/dhp-continuous-validation/src/test/java/ReadResultsTest.java b/dhp-workflows/dhp-continuous-validation/src/test/java/ReadResultsTest.java
index 8384bfc96..c1bc48c82 100644
--- a/dhp-workflows/dhp-continuous-validation/src/test/java/ReadResultsTest.java
+++ b/dhp-workflows/dhp-continuous-validation/src/test/java/ReadResultsTest.java
@@ -1,12 +1,14 @@
-import com.google.gson.Gson;
-import eu.dnetlib.dhp.continuous_validator.ContinuousValidator;
-import org.slf4j.LoggerFactory;
-
 import java.io.BufferedReader;
 import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.util.List;
 
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+
+import eu.dnetlib.dhp.continuous_validator.ContinuousValidator;
+
 public class ReadResultsTest {
 
 	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ContinuousValidator.class);
@@ -25,7 +27,8 @@
 		} catch (FileNotFoundException fnfe) {
 			logger.error("The results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\" does not exist!");
 		} catch (Exception e) {
-			logger.error("Error when reading the json-results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\"", e);
+			logger
+				.error("Error when reading the json-results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\"", e);
 		}
 	}
 
diff --git a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java
index 70bbd066a..bf82c7257 100644
--- a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java
+++ b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java
@@ -30,7 +30,6 @@
 import eu.dnetlib.dhp.common.collection.HttpClientParams;
 import eu.dnetlib.dhp.schema.orcid.AuthorData;
 import eu.dnetlib.doiboost.orcid.util.DownloadsReport;
 import eu.dnetlib.doiboost.orcid.util.MultiAttemptsHttpConnector;
-import jdk.nashorn.internal.ir.annotations.Ignore;
 
 public class OrcidClientTest {
 	final int REQ_LIMIT = 24;
@@ -152,7 +151,7 @@
 	}
 
 	// @Test
-	@Ignore
+	@Disabled
 	private void testModifiedDate() throws ParseException {
 		testDate(toRetrieveDate);
 		testDate(toNotRetrieveDate);
@@ -332,7 +331,7 @@
 	}
 
 	@Test
-	@Ignore
+	@Disabled
 	void testUpdatedRecord() throws Exception {
final String base64CompressedRecord = IOUtils .toString(getClass().getResourceAsStream("0000-0001-7281-6306.compressed.base64")); @@ -341,7 +340,7 @@ public class OrcidClientTest { } @Test - @Ignore + @Disabled void testUpdatedWork() throws Exception { final String base64CompressedWork = "H4sIAAAAAAAAAM1XS2/jNhC+51cQOuxJsiXZSR03Vmq0G6Bo013E6R56oyXaZiOJWpKy4y783zvUg5Ksh5uiCJogisX5Zjj85sHx3f1rFKI94YKyeGE4I9tAJPZZQOPtwvj9+cGaGUhIHAc4ZDFZGEcijHvv6u7A+MtcPVCSSgsUQObYzuzaccBEguVuYYxt+LHgbwKP6a11M3WnY6UzrpB7KuiahlQeF0aSrkPqGwhcisWcxpLwGIcLYydlMh+PD4fDiHGfBvDcjmMxLhGlBglSH8vsIH0qGlLqBFRIGvvDWjWQ1iMJJ2CKBANqGlNqMbkj3IpxRPq1KkypFZFoDRHa0aRfq8JoNjhnfIAJJS6xPouiIQJyeYmGQzE+cO5cXqITcItBlKyASExD0a93jiwtvJDjYXDDAqBPHoH2wMmVWGNf8xyyaEBiSTeUDHHWBpd2Nmmc10yfbgHQrHCyIRxKjQwRUoFKPRwEnIgBnQJQVdGeQgJaCRN0OMnPkaUFVbD9WkpaIndQJowf+8EFoIpTErJjBFQOBavElFpfUxwC9ZcqvQErdQXhe+oPFF8BaObupYzVsYEOARzSoZBWmKqaBMHcV0Wf8oG0beIqD+Gdkz0lhyE3NajUW6fhQFSV9Nw/MCBYyofYa0EN7wrBz13eP+Y+J6obWgE8Pdd2JpYD94P77Ezmjj13b0bu5PqPu3EXumEnxEJaEVxSUIHammsra+53z44zt2/m1/bItaeVtQ6dhs3c4XytvW75IYUchMKvEHVUyqmnWBFAS0VJrqSvQde6vp251ux2NtFuKcVOi+oK9YY0M0Cn6o4J6WkvtEK2XJ1vfPGAZxSoK8lb+SxJBbLQx1CohOLndjJUywQWUFmqEi3G6Zaqf/7buOyYJd5IYpfmf0XipfP18pDR9cQCeEuJQI/Lx36bFbVnpBeL2UwmqQw7ApAvf4GeGGQdEbENgolui/wdpjHaYCmPCIPPAmGBIsxfoLUhyRCB0SeCakEBJRKBtfJ+UBbI15TG4PaGBAhWthx8DmFYtHZQujv1CWbLLdzmmUKmHEOWCe1/zdu78bn/+YH+hCOqOzcXfFwuP6OVT/P710crwqGXFrpNaM2GT3MXarw01i15TIi3pmtJXgtbTVGf3h6HKfF+wBAnPyTfdCChudlm5gZaoG//F9pPZsGQcqqbyZN5hBau5OoIJ3PPwjTKDuG4s5MZp2rMzF5PZoK34IT6PIFOPrk+mTiVO5aJH2C+JJRjE/06eoRfpJxa4VgyYaLlaJUv/EhCfATMU/76gEOfmehL/qbJNNHjaFna+CQYB8wvo9PpPFJ5MOrJ1Ix7USBZqBl7KRNOx1d3jex7SG6zuijqCMWRusBsncjZSrM2u82UJmqzpGhvUJN2t6caIM9QQgO9c0t40UROnWsJd2Rbs+nsxpna9u30ttNkjechmzHjEST+X5CkkuNY0GzQkzyFseAf7lSZuLwdh1xSXKvvQJ4g4abTYgPV7uMt3rskohlJmMa82kQkshtyBEIYqQ+YB8X3oRHg7iFKi/bZP+Ao+T6BJhIT/vNPi8ffZs+flk+r2v0WNroZiyWn6xRmadHqTJXsjLJczElAZX6TnJdoWTM1SI2gfutv3rjeBt5t06rVvNuWup29246tlvluO+u2/G92bK9DXheL6uFd/Q3EaRDZqBIAAA=="; final String work = ArgumentApplicationParser.decompressValue(base64CompressedWork); @@ -413,7 +412,7 @@ public class OrcidClientTest { } @Test - @Ignore + @Disabled void testDownloadedAuthor() throws Exception { final String base64CompressedWork = "H4sIAAAAAAAAAI2Yy26jMBSG932KiD0hIe1MiwiVZjGLkWbX2XRHsFOsgs3YJmnefszFFy4+mUhtVPz9P/gcH/vQ9PWrrjYXzAVh9Bjst7tgg2nBEKEfx+DP28/wOdgImVOUV4ziY3DDInjNHlKOC8ZRMnxtmlyWxyDaqU+ofg7h/uX7IYwfn+Ngo25ARUKoxJzm1TEopWySKLper1vGC4LU74+IikgTWoFRW+SyfyyfxCBag4iQhBawyoGMDjdqJrnECJAZRquYLDEPaV5jv8oyWlXj+qTiXZLGr7KMiQbnjAOR6IY1W7C6hgIwjGt6SKGfHsY13ajHYipLIcIyJ5Xw6+akdvjEtyt4wxEwM6+VGph5N2zYr2ENhQRhKsmZYChmS1j7nFs6VIBPOwImKhyfMVeFg6GAWEjrcoQ4FoBmBGwVXYhagGHDBIEX+ZzUDiqyn35VN6rJUpUJ4zc/PAI2T03FbrUKJZQszWjV3zavVOjvVfoE01qB+YUUQPGNwHTt3luxJjdqh1AxJFBKLWOrSeCcF13RtxxYtlPOPqH6m+MLwVfoMQ2kdae2ArLajc6fTxkI1nIoegs0yB426pMO+0fSw07xDKMu0XKSde5C2VvrlVMijRzFwqY7XTJI1QMLWcmEzMxtDdxfHiYSgTNJnYJ1K9y5k0tUrMgrnGGaRiuXxxuClulYUbr0nBvpkYLjvgTCGsuSoex3f1CEvRPHKI184NJKtKeaiO7cD5E61bJ4F+9DFd7d01u8Tw6H5BBvvz8f3q3nXLGIeJULGdaqeVBBRK7rS7h/fNvvk/gpedxt4923dxP7Fc3KtKuc1BhlkrfYmeN4dcmrhmbw60+HmWw2CKgbTuqc32CXKTTmeTWT6bDBjPsQ0DTpnchdaYO0ayQ2FyLIiVREqs25aU8VKYLRbK0BsyZuqvr1MU2Sm/rDdhe/2CRN6FU/b+oBVyj1zqRtC5F8kAumfTclsl+s7EoNQu64nfOaVLeezX60Z3XCULLi6GI2IZGTEeey7fec9lBAuXawIHKcpifE7GABHWfoxLVfpUNPBXoMbZWrHFsR3bPAk9J9i2sw9nW6AQT1mpk++7JhW+v44Hmt8PomJqfD13jRnvFOSxCKtu6qHoyBbQ7cMFo750UEfGaXm6bEeplXIXj2hvL6mA7tzvIwmM9pbJFBG834POZdLGi2gH2u9u0K9HMwn5PTioFWLufzmrS4oNuU9Pkt2rf/2jMs7fMdm2rQTTM+j+49AzToAVuXYA1mD2k0+XdE9vAP+JYR5NcQAAA="; final String work = 
ArgumentApplicationParser.decompressValue(base64CompressedWork);
@@ -421,7 +420,7 @@
 	}
 
 	@Test
-	@Ignore
+	@Disabled
 	void testDownloadedWork() throws Exception {
 		final String base64CompressedWork = "H4sIAAAAAAAAANVa63LiOBb+z1Oo+LVbhbkGAlTCLE1Id9IhTQV6unr/CVvB2tiWR5Khmal5rX2BfbE9ki3b3Jzt6Y13h6pQSPrOXTo6knL10zffQxvCBWXBdbVVb1YRCWzm0GB9Xf28vLX6VSQkDhzssYBcV3dEVH8aVa62jL8M1RcKI2kBAYwNLnrtXrMPFCGW7nW10YSPBX8dq3XRb1swNGgomkaG3FBBV9SjcnddDaOVR+0qApUCMaSBJDzA3nXVlTIcNhrb7bbOuE0d+F43AtEwCENBnMjGUhtyjiSFGBqHCkkDu5gqB0rpSMgJsCJOAVmKMVRMuoRbAfbJeaoMY6h84q8gQi4Nz1NlmNQbnDNe4Ak1bLA28/0iB8TjBg1GMV5gdzxu0CGoxSBKlkMkpp44T3eINBxeyG5bKDABpJb7QF1guRpOsd/iOWRRhwSSPlNS5LNjsOHzHAXxmjlHmwBSr3DyTDgsNVLkkAxk6LDjcCIKaBJAtoo2FCagFTJBiyf5IdJwUAv2PJUaNUgXlgnju/PgBJDFKfTYzgdXFgXLYAzVLxH2wPWvrfQ9mKEVhG+oXbD4EsD+3H1txqaxgQwBPqRFIc0w2WoSBHNbLfqIF0zbfVymIbQ52VCyLVIzBRm6VeQVRFWNHuoHDASLeJH3jqDVUQXB5yrOH0ObE5UNLQe+R+1mu2U1u1Z7sGy2hq3esN2tt5oXf79qnELv8fGwkJYPmxSswD1uA6vVXrY7w+5g2G3WuxedjNsJmj2escJx33G/ZXsU5iAs/AyRR0WcjpRXBLglc0lM1BjP59bX1qw9Hn/+dH87/dy9vBikeinKkyzVHjoqJNWIk7QuE3KU6pES6O7MwsarJh44QW1KowcWOCxAC9tlzEPsGX3YrYGQICgS0JKzENach2bEoTYNyKEQzaJyQnzSqesKSaV3IhRx92L8tLAm7GerjbZUujSwlFnIobqKkTuth+Q4ED4Vqqypp5JyfK8ah5Ji0f8AZVSGT2TZVGXfBLw/liOyqdRpJqfyXr8ldyEZrehKkm8Jr/2hc3Qb7EVk9DfMJbU98pu3k+6aETXXBebCZpt23tBaBUfSZRxdo98eYmgNfRxrh3zAnldDM/37FvZ+IiWtoQfddgiaEGBIDGCG7btA7jgBP9svAK2h90l4yYqIGop5jgMHXA4J0NB9ksR+YTX0qFtfqACO01jGjDHFPx552AW2W0P3uvGROk4NLfTvCeNS8X9MaDg1rL9Qz6PYh7En3f4ZNmKS6nUfQYFmE6PYe05IYBqPFGaq5wHlYpaoDbYqxokVK+JBerz51z+BIzc+SfSdTHVrTiSYtZzGFNOdGrr5ohsLF2+NUguqppkDoua6/S6yXwAYu44pM+/HiZ1BwEDWMqYbC5fjZ+MEBwMjb4PRLdTFYWrUwiUhJH/H+G3pMl/7fjqJhTGwSwU5lnfLsVDmxIPvmRetbJeCOsvfaxWXbXWxLVziqNky51BLW1OP2JKzgNoASSa7Gk1WAfrLI9mirzBBIUD1r/W/AgrMla7CjEMOzYBJolo30/mnxd0SzadPt5+eZtMb9O7rEN1wNINgEA8Ha+IxNMdrHLCQRR4TFRCudnmB7m6GqD0YDCqW+lQqlfnndw93iw/TJ/RwN5k+TqZDNJkAQyUvUlWvktjrdgbQEeI1EapN8Grd7MOeYJlfajSxWVOMfcIhVQXgfcFsqhcceobVA/U3GjsbDCYrjVSKSz0wHo8Xym6dArRvvjsbAfUGouFr8s5lG9o72DVVSy1saDqMqlarWW+12r2GiIXXMzuAU6AQcLLqWf3mZRf6iOlsNQdda9BudhQnvNNdPWN8XA7BgU5G2k3pLADA75XD3BSnn3y+3M90SbZWGczkxiRVmfSaJrd0V8u0yG3CeYRyht7O07Ste45weuqNmhcpLO44woEPRq1eilLN/f3ntEqGPFfzi2PmudHTO3EOEKf60LdTyUeDr7KIIzKfTfqtdr896JxklQtbES/IQD7UyL+SZIJSXYhLHkHZ9oqEjPR1MRzWu550cDYdCeI9n+S4hzouUU76+UeCQJ0fjkKn0+v3m703i0Eh/z97BCDH/XAAziTIt4rH94j7s4dHbSY/HJ90e3qriBQL+MMxCGETs9j/QxiSQ5PaS63/QsZqdS8vOxdvtj7Oc//fL4dTI2LvDAfVA6erSDKe3+cPxw70j4c5HHZlfLT9iAEZYKjZkxOYKZxymJy659l/t+QZllC5bvVJrzShD5GN0/NkiaZyqNcJh0NrdngtTfp7wviaHB+SS1Ng7O+Sk3h5HodT4S8RyY78pUmGM6eEg1l8tVCa1KnvY/SgrzDKsxRLF46j+uahNKH3BE6lsIb1lUxpUhdS3WUE+u6nPP/qiyAsklumMhMz9SBNqeus0oQ+QXqwIa7m3qy87IhXnBLPI8kVXXlZMaASm5vAEqWuKYkvHMtbPdiPiIdm6dVmeVMZjX+lfnKDWmaRAT7ev6ctTfhEF3RoWnJeXlKfSXcHcsf69rk0wTd4Qx30RV9yl5et2Ipwqe/SS5MJXiU8vbIv2b/qZaC8PZ65AUwj9QJR3vx1mQ9b7VPy1FFebnSpWq7xi0qJuwA+fLYpL7rwJdLXobcSa97kM4Cl35f3YXmofp0+8R9gBc/XeXL9Vn38pH7mLTs27z9T8ky1n7ynlZ0I4le78rYzl6t/woG5krwQlpcRcLDD2UPkH5F73C9G5tFKfY0q/wa1TIHI0CgAAA==";
 		final String work = ArgumentApplicationParser.decompressValue(base64CompressedWork);
diff --git a/pom.xml b/pom.xml
index 3fd351c1d..8a2ac6b73 100644
--- a/pom.xml
+++ b/pom.xml
@@ -204,6 +204,12 @@
 				<scope>test</scope>
 			</dependency>
 
+			<dependency>
+				<groupId>eu.dnetlib</groupId>
+				<artifactId>uoa-validator-engine2</artifactId>
+				<version>0.9.0</version>
+			</dependency>
+
 			<dependency>
 				<groupId>org.slf4j</groupId>
 				<artifactId>jcl-over-slf4j</artifactId>
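
---

A note on the rewritten filter in ContinuousValidator: Spark SQL uses three-valued logic, so a comparison such as "id != NULL" evaluates to NULL and is treated as false by filter(), silently dropping every row; null checks must be expressed with IS NOT NULL, as done above. A minimal, hypothetical reproduction (local SparkSession, same column names as the parquet input; not part of the patch):

import java.util.Arrays;
import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

public class NullFilterDemo {

	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("null-filter-demo").getOrCreate();

		StructType schema = new StructType()
			.add("id", DataTypes.StringType)
			.add("encoding", DataTypes.StringType)
			.add("body", DataTypes.StringType);

		List<Row> rows = Arrays.asList(
			RowFactory.create("rec-1", "XML", "<record/>"),
			RowFactory.create(null, "XML", "<record/>"));

		Dataset<Row> df = spark.createDataFrame(rows, schema);

		// Any comparison with NULL yields NULL, which filter() treats as false:
		// this keeps 0 rows, silently dropping the valid "rec-1" as well.
		System.out.println(df.filter("encoding = 'XML' and id != NULL and body != null").count()); // 0

		// IS NOT NULL is the null-safe predicate: this keeps exactly the 1 valid row.
		System.out.println(df.filter("encoding = 'XML' and id is not null and body is not null").count()); // 1

		spark.stop();
	}
}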
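
The deleted ParquetUtils helper read parquet files record-by-record through AvroParquetReader; the workflow now lets Spark's own parquet source do that work. A hypothetical sketch of the same id-to-XML-body lookup expressed against spark.read().parquet(), kept here only for comparison: collectAsList() pulls everything to the driver, so it suits small inputs only, and the method name follows the deleted getIdXmlMapFromParquetFile:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class IdXmlMapDemo {

	// Returns a map from record id to its XML body, like the deleted helper did.
	public static Map<String, String> idXmlMap(SparkSession spark, String parquetPath) {
		List<Row> rows = spark
			.read()
			.parquet(parquetPath)
			.filter("encoding = 'XML' and id is not null and body is not null")
			.selectExpr("id", "body")
			.collectAsList();
		return rows
			.stream()
			.collect(Collectors.toMap(r -> r.getString(0), r -> r.getString(1), (first, second) -> second));
	}
}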
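
Since the validation results are written as gzip-compressed JSON lines, they can also be spot-checked with Spark directly, without the Gson-based loading in ReadResultsTest. A small sketch, assuming the path argument points at the RESULTS_FILE_NAME output directory written by the job:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ReadValidationResultsDemo {

	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("read-results").getOrCreate();

		// Spark decompresses the gzip parts transparently when reading JSON lines.
		Dataset<Row> results = spark.read().json(args[0]);
		results.printSchema();
		results.show(5, false);

		spark.stop();
	}
}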