continuous validation WIP

Claudio Atzori 2023-12-18 15:46:36 +01:00
parent a2feda6c07
commit 24173d7a0b
11 changed files with 101 additions and 415 deletions

View File

@@ -689,7 +689,9 @@ public class GraphCleaningFunctions extends CleaningFunctions {
.filter(Objects::nonNull)
.filter(p -> Objects.nonNull(p.getQualifier()))
.filter(p -> StringUtils.isNotBlank(p.getValue()))
.filter(p -> StringUtils.contains(StringUtils.lowerCase(p.getQualifier().getClassid()), ORCID))
.filter(
p -> StringUtils
.contains(StringUtils.lowerCase(p.getQualifier().getClassid()), ORCID))
.map(p -> {
// hack to distinguish orcid from orcid_pending
String pidProvenance = getProvenance(p.getDataInfo());
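The reformatted filter is behaviour-preserving: both the orcid and orcid_pending classids contain the same substring, which is why the provenance is inspected next. A minimal self-contained sketch of the check (class and method names here are illustrative, not part of the commit):

import org.apache.commons.lang3.StringUtils;

public class OrcidPidFilterSketch {

    private static final String ORCID = "orcid";

    // true for classids such as "ORCID" or "orcid_pending", false otherwise
    public static boolean looksLikeOrcid(String classid) {
        return StringUtils.contains(StringUtils.lowerCase(classid), ORCID);
    }

    public static void main(String[] args) {
        System.out.println(looksLikeOrcid("ORCID"));         // true
        System.out.println(looksLikeOrcid("orcid_pending")); // true -> disambiguated via provenance
        System.out.println(looksLikeOrcid("doi"));           // false
    }
}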

View File

@@ -7,10 +7,10 @@ import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import eu.dnetlib.pace.model.Person;
import jdk.nashorn.internal.ir.annotations.Ignore;
public class UtilTest {
@@ -22,7 +22,7 @@ public class UtilTest {
}
@Test
@Ignore
@Disabled
public void paceResolverTest() {
PaceResolver paceResolver = new PaceResolver();
paceResolver.getComparator("keywordMatch", params);
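For reference, the import removed above, jdk.nashorn.internal.ir.annotations.Ignore, is not a test annotation at all, so JUnit 5 would silently run the test anyway; org.junit.jupiter.api.Disabled is the supported way to skip it. A minimal sketch (test name illustrative):

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

public class DisabledAnnotationSketch {

    @Test
    @Disabled("skipped until the resolver parameters are available")
    void skippedTest() {
        // reported as skipped by JUnit 5 instead of being executed
    }
}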

View File

@@ -19,10 +19,7 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
@NoArgsConstructor
@AllArgsConstructor
@Getter
@Setter
public class BipProjectModel {
String projectId;
@@ -34,6 +31,17 @@ public class BipProjectModel {
String totalCitationCount;
public BipProjectModel() {
}
public BipProjectModel(String projectId, String numOfInfluentialResults, String numOfPopularResults,
String totalImpulse, String totalCitationCount) {
this.projectId = projectId;
this.numOfInfluentialResults = numOfInfluentialResults;
this.numOfPopularResults = numOfPopularResults;
this.totalImpulse = totalImpulse;
this.totalCitationCount = totalCitationCount;
}
// each project bip measure has exactly one value, hence one key-value pair
private Measure createMeasure(String measureId, String measureValue) {
@@ -71,4 +79,43 @@
createMeasure("totalCitationCount", totalCitationCount));
}
public String getProjectId() {
return projectId;
}
public void setProjectId(String projectId) {
this.projectId = projectId;
}
public String getNumOfInfluentialResults() {
return numOfInfluentialResults;
}
public void setNumOfInfluentialResults(String numOfInfluentialResults) {
this.numOfInfluentialResults = numOfInfluentialResults;
}
public String getNumOfPopularResults() {
return numOfPopularResults;
}
public void setNumOfPopularResults(String numOfPopularResults) {
this.numOfPopularResults = numOfPopularResults;
}
public String getTotalImpulse() {
return totalImpulse;
}
public void setTotalImpulse(String totalImpulse) {
this.totalImpulse = totalImpulse;
}
public String getTotalCitationCount() {
return totalCitationCount;
}
public void setTotalCitationCount(String totalCitationCount) {
this.totalCitationCount = totalCitationCount;
}
}
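The createMeasure helper above builds exactly one key-value pair per measure. A hedged sketch of that shape, with stand-in classes; the real Measure and KeyValue types come from the dhp-schemas module and their exact API is assumed here:

import java.util.Collections;
import java.util.List;

public class MeasureSketch {

    // stand-ins for the dhp-schemas Measure/KeyValue types (assumed shape)
    static class KeyValue {
        String key, value;
    }

    static class Measure {
        String id;
        List<KeyValue> unit;
    }

    // each project bip measure has exactly one value, hence one key-value pair
    static Measure createMeasure(String measureId, String measureValue) {
        Measure measure = new Measure();
        measure.id = measureId;
        KeyValue kv = new KeyValue();
        kv.key = "score"; // assumed key name
        kv.value = measureValue;
        measure.unit = Collections.singletonList(kv);
        return measure;
    }
}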

View File

@@ -14,11 +14,7 @@
<artifactId>dhp-continuous-validation</artifactId>
<!-- The "version" is inherited from the parent module. -->
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
@@ -29,40 +25,11 @@
<dependency>
<groupId>eu.dnetlib</groupId>
<artifactId>uoa-validator-engine2</artifactId>
<version>0.9.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${dhp.spark.version}</version>
<scope>provided</scope>
<exclusions>
<!-- This is an older version which causes problems. We have to add the latest version independently. -->
<!-- This exists only in Hadoop 3.0.0+ -->
<!--<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-api</artifactId>
</exclusion>-->
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -70,181 +37,28 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${dhp.spark.version}</version>
<scope>provided</scope>
<exclusions>
<!-- This exclusion is a must for scala 2.11 and spark 2.4.0.cloudera2 -->
<exclusion>
<groupId>com.twitter</groupId>
<artifactId>parquet-format</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.parquet/parquet-avro -->
<!--
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>1.13.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${dhp.hadoop.version}</version>
<scope>provided</scope> <!-- This is required here, when setting the "version" above, even if that version is the same used in the main pom, where the dependency includes the "provided" scope. -->
<exclusions>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
</exclusion>
<!-- Vulnerable dependencies: -->
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
</exclusion>
<!-- This dependency is required in order for the program to run without errors. It is discontinued. -->
<!--<exclusion>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</exclusion>-->
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-app</artifactId>
<version>${dhp.hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<!-- Vulnerable dependencies: -->
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Add back some updated version of the needed dependencies. -->
<!-- This should be enabled only when using Hadoop 3.0.0+ -->
<!--<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-api</artifactId>
<version>${dhp.hadoop.version}</version>
<scope>provided</scope>
</dependency>-->
<dependency> <!-- Newer versions (>=0.18.X) are not compatible with JAVA 8. -->
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
<version>0.17.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
<version>6.5.1</version>
</dependency>
-->
<!-- Other dependencies. -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${google.gson.version}</version>
</dependency>
<!-- logback versions 1.4.X require Java-11 -->
<!-- logback versions 1.3.X require Java-8, but if this project is added as Dependency in a Spring Boot App (with v.2.7.x), then Spring Boot throws an error, since it does not yet support logback 1.3.x -->
<!-- https://mvnrepository.com/artifact/ch.qos.logback/logback-core -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>1.2.13</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.36</version>
</dependency>
<!-- https://mvnrepository.com/artifact/ch.qos.logback/logback-classic -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.13</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-engine -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<id>libs</id>
<url>file:///${project.basedir}/libs</url>
</repository>
</repositories>
</project>

View File

@@ -1,10 +1,13 @@
package eu.dnetlib.dhp.continuous_validator;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.validator2.validation.XMLApplicationProfile;
import eu.dnetlib.validator2.validation.guideline.openaire.*;
import eu.dnetlib.validator2.validation.utils.TestUtils;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
@@ -13,15 +16,13 @@ import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.validator2.validation.XMLApplicationProfile;
import eu.dnetlib.validator2.validation.guideline.openaire.*;
import eu.dnetlib.validator2.validation.utils.TestUtils;
import scala.Option;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
public class ContinuousValidator {
public static final String TEST_FILES_V4_DIR = TestUtils.TEST_FILES_BASE_DIR + "openaireguidelinesV4/";
@@ -74,7 +75,8 @@ public class ContinuousValidator {
guidelines = parser.get("openaire_guidelines");
if (guidelines == null) {
logger.error("The \"openaire_guidelines\" was not retrieved from the parameters file: " + parametersFile);
logger
.error("The \"openaire_guidelines\" was not retrieved from the parameters file: " + parametersFile);
return;
}
@@ -135,17 +137,6 @@ public class ContinuousValidator {
String finalOutputPath = outputPath;
runWithSparkSession(conf, isSparkSessionManaged, spark -> {
Dataset<Row> parquetFileDF = spark.read().parquet(finalParquet_file_path);
parquetFileDF.show(5);
// Filter the results based on the XML-encoding and non-null id and body.
parquetFileDF = parquetFileDF
.filter(
parquetFileDF
.col("encoding")
.eqNullSafe("XML")
.and(parquetFileDF.col("id").isNotNull())
.and(parquetFileDF.col("body").isNotNull()));
// Use a new instance of Document Builder in each worker, as it is not thread-safe.
MapFunction<Row, XMLApplicationProfile.ValidationResult> validateMapFunction = row -> profile
@@ -155,58 +146,16 @@ public class ContinuousValidator {
.getDocumentBuilder()
.parse(IOUtils.toInputStream(row.getAs("body").toString(), StandardCharsets.UTF_8)));
Dataset<XMLApplicationProfile.ValidationResult> validationResultsDataset = parquetFileDF
.map(validateMapFunction, Encoders.bean(XMLApplicationProfile.ValidationResult.class));
if (logger.isTraceEnabled()) {
logger.trace("Showing a few validation-results.. just for checking");
validationResultsDataset.show(5);
}
// Write the results to json file immediately, without converting them to a list.
validationResultsDataset
spark
.read()
.parquet(finalParquet_file_path)
.filter("encoding = 'XML' and id != NULL and body != null")
.map(validateMapFunction, Encoders.bean(XMLApplicationProfile.ValidationResult.class))
.write()
.option("compression", "gzip")
.mode(SaveMode.Overwrite)
.json(finalOutputPath + RESULTS_FILE_NAME); // The filename should be the name of the input-file or the
// input-directory.
if (logger.isTraceEnabled()) {
List<XMLApplicationProfile.ValidationResult> validationResultsList = validationResultsDataset
.javaRDD()
.collect();
if (validationResultsList.isEmpty()) {
logger.error("The \"validationResultsList\" was empty!");
return;
}
validationResultsList.forEach(vr -> logger.trace(vr.id() + " | score:" + vr.score()));
for (XMLApplicationProfile.ValidationResult result : validationResultsList)
logger.trace(result.toString());
}
// TODO - REMOVE THIS WHEN THE WRITE FROM ABOVE IS OK
/*
* try (BufferedWriter writer = Files .newBufferedWriter(Paths.get(outputPath + RESULTS_FILE),
* StandardCharsets.UTF_8)) { writer.write(new Gson().toJson(validationResultsList)); } catch (Exception e)
* { logger.error("Error when writing the \"validationResultsList\" as json into the results-file: " +
* outputPath + RESULTS_FILE); return; }
*/
// TODO - Maybe the following section is not needed, when ran as an oozie workflow..
Option<String> uiWebUrl = spark.sparkContext().uiWebUrl();
if (uiWebUrl.isDefined()) {
logger
.info(
"Waiting 60 seconds, before shutdown, for the user to check the jobs' status at: "
+ uiWebUrl.get());
try {
Thread.sleep(60_000);
} catch (InterruptedException ignored) {
}
} else
logger.info("The \"uiWebUrl\" is not defined, in order to check the jobs' status. Shutting down..");
});
}
}
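A subtlety in the condensed pipeline above: under SQL three-valued logic a comparison like id != NULL never evaluates to true, so null checks belong in IS NOT NULL form, or in the equivalent Column API used by the code this commit removes. A sketch of that Column-based filter:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class XmlRecordFilterSketch {

    // keep only XML-encoded records that carry both an id and a body
    public static Dataset<Row> xmlRecords(SparkSession spark, String parquetPath) {
        Dataset<Row> df = spark.read().parquet(parquetPath);
        return df
            .filter(
                df.col("encoding").eqNullSafe("XML")
                    .and(df.col("id").isNotNull())
                    .and(df.col("body").isNotNull()));
    }
}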

View File

@@ -1,101 +0,0 @@
package eu.dnetlib.dhp.continuous_validator.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.util.HadoopInputFile;
import org.apache.parquet.io.InputFile;
import org.slf4j.LoggerFactory;
public class ParquetUtils {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ParquetUtils.class);
private static final Configuration parquetConfig = new Configuration();
public static List<GenericRecord> getParquetRecords(String fullFilePath) {
InputFile inputFile;
try { // TODO - Verify that this will create any directories which do not exist in the provided path. Currently
// we create the directories beforehand.
inputFile = HadoopInputFile.fromPath(new Path(fullFilePath), parquetConfig);
// logger.trace("Created the parquet " + outputFile); // DEBUG!
} catch (Throwable e) { // The simple "Exception" may not be thrown here, but an "Error" may be thrown.
// "Throwable" catches EVERYTHING!
logger.error("", e);
return null;
}
List<GenericRecord> records = new ArrayList<>();
GenericRecord record;
try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord> builder(inputFile).build()) {
while ((record = reader.read()) != null) {
records.add(record);
}
} catch (Throwable e) { // The simple "Exception" may not be thrown here, but an "Error" may be thrown.
// "Throwable" catches EVERYTHING!
logger.error("Problem when creating the \"ParquetWriter\" object or when writing the records with it!", e);
// At some point, I got an "NoSuchMethodError", because of a problem in the AvroSchema file:
// (java.lang.NoSuchMethodError: org.apache.avro.Schema.getLogicalType()Lorg/apache/avro/LogicalType;).
// The error was with the schema: {"name": "date", "type" : ["null", {"type" : "long", "logicalType" :
// "timestamp-millis"}]},
return null;
}
return records; // It may be empty.
}
public static Map<String, String> getIdXmlMapFromParquetFile(String parquetFileFullPath) {
List<GenericRecord> recordList = ParquetUtils.getParquetRecords(parquetFileFullPath);
if (recordList == null)
return null; // The error is already logged.
else if (recordList.isEmpty()) {
logger.error("The parquet-file \"" + parquetFileFullPath + "\" had no records inside!");
return null;
}
Map<String, String> idXmlMap = new HashMap<>();
for (GenericRecord record : recordList) {
if (logger.isTraceEnabled())
logger.trace(record.toString());
Object id = record.get("id");
if (id == null)
continue;
String idStr = id.toString();
Object encoding = record.get("encoding");
if (encoding == null) {
logger.warn("Record with id = \"" + idStr + "\" does not provide the encoding for its body!");
continue;
}
String encodingStr = encoding.toString();
if (!encodingStr.equals("XML")) {
logger.warn("Record with id = \"" + idStr + "\" does not have XML encoding for its body!");
continue;
}
Object body = record.get("body");
if (body == null) {
logger.warn("Record with id = \"" + idStr + "\" does not have a body!");
continue;
}
String bodyStr = body.toString();
idXmlMap.put(idStr, bodyStr);
// logger.debug(idStr + " | " + idXmlMap.get(idStr));
}
return idXmlMap;
}
}

View File

@@ -67,10 +67,10 @@
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
<!--<property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>${oozieActionShareLibForSpark2}</value>
</property>-->
</property>
</configuration>
</global>
@@ -93,8 +93,8 @@
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
&#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
</spark-opts>

View File

@@ -1,33 +0,0 @@
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.continuous_validator.utils.ParquetUtils;
import eu.dnetlib.validator2.validation.utils.TestUtils;
public class ReadParquetDataTest {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ReadParquetDataTest.class);
private static final String parquetFileFullPath = TestUtils.TEST_FILES_BASE_DIR
+ "part-00589-733117df-3822-4fce-bded-17289cc5959a-c000.snappy.parquet";
public static void main(String[] args) {
testParquetRead();
}
@Test
public static void testParquetRead() {
Map<String, String> idXmlMap = ParquetUtils.getIdXmlMapFromParquetFile(parquetFileFullPath);
if (idXmlMap == null) {
logger.error("Could not create the \"idXmlMap\" from parquet-file: " + parquetFileFullPath);
System.exit(99);
} else if (idXmlMap.isEmpty())
logger.warn("The generated \"idXmlMap\" was empty, for parquet-file: " + parquetFileFullPath);
else
logger.info("The \"idXmlMap\" was successfully generated, for parquet-file: " + parquetFileFullPath);
}
}

View File

@@ -1,12 +1,14 @@
import com.google.gson.Gson;
import eu.dnetlib.dhp.continuous_validator.ContinuousValidator;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.List;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import eu.dnetlib.dhp.continuous_validator.ContinuousValidator;
public class ReadResultsTest {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ContinuousValidator.class);
@@ -25,7 +27,8 @@ public class ReadResultsTest {
} catch (FileNotFoundException fnfe) {
logger.error("The results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\" does not exist!");
} catch (Exception e) {
logger.error("Error when reading the json-results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\"", e);
logger
.error("Error when reading the json-results-file \"" + ContinuousValidator.RESULTS_FILE_NAME + "\"", e);
}
}
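The catch blocks above guard a plain Gson read of the results file; a minimal sketch of such a read (the element type and helper name are assumptions, not the test's actual code):

import java.io.BufferedReader;
import java.io.FileReader;
import java.lang.reflect.Type;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class ReadResultsSketch {

    // parse a JSON array file into a list; throws on missing or malformed input
    public static List<Object> readResults(String resultsFilePath) throws Exception {
        Type listType = new TypeToken<List<Object>>() {}.getType();
        try (BufferedReader reader = new BufferedReader(new FileReader(resultsFilePath))) {
            return new Gson().fromJson(reader, listType);
        }
    }
}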

View File

@@ -30,7 +30,6 @@ import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.schema.orcid.AuthorData;
import eu.dnetlib.doiboost.orcid.util.DownloadsReport;
import eu.dnetlib.doiboost.orcid.util.MultiAttemptsHttpConnector;
import jdk.nashorn.internal.ir.annotations.Ignore;
public class OrcidClientTest {
final int REQ_LIMIT = 24;
@@ -152,7 +151,7 @@ public class OrcidClientTest {
}
// @Test
@Ignore
@Disabled
private void testModifiedDate() throws ParseException {
testDate(toRetrieveDate);
testDate(toNotRetrieveDate);
@@ -332,7 +331,7 @@ public class OrcidClientTest {
}
@Test
@Ignore
@Disabled
void testUpdatedRecord() throws Exception {
final String base64CompressedRecord = IOUtils
.toString(getClass().getResourceAsStream("0000-0001-7281-6306.compressed.base64"));
@@ -341,7 +340,7 @@ public class OrcidClientTest {
}
@Test
@Ignore
@Disabled
void testUpdatedWork() throws Exception {
final String base64CompressedWork = "H4sIAAAAAAAAAM1XS2/jNhC+51cQOuxJsiXZSR03Vmq0G6Bo013E6R56oyXaZiOJWpKy4y783zvUg5Ksh5uiCJogisX5Zjj85sHx3f1rFKI94YKyeGE4I9tAJPZZQOPtwvj9+cGaGUhIHAc4ZDFZGEcijHvv6u7A+MtcPVCSSgsUQObYzuzaccBEguVuYYxt+LHgbwKP6a11M3WnY6UzrpB7KuiahlQeF0aSrkPqGwhcisWcxpLwGIcLYydlMh+PD4fDiHGfBvDcjmMxLhGlBglSH8vsIH0qGlLqBFRIGvvDWjWQ1iMJJ2CKBANqGlNqMbkj3IpxRPq1KkypFZFoDRHa0aRfq8JoNjhnfIAJJS6xPouiIQJyeYmGQzE+cO5cXqITcItBlKyASExD0a93jiwtvJDjYXDDAqBPHoH2wMmVWGNf8xyyaEBiSTeUDHHWBpd2Nmmc10yfbgHQrHCyIRxKjQwRUoFKPRwEnIgBnQJQVdGeQgJaCRN0OMnPkaUFVbD9WkpaIndQJowf+8EFoIpTErJjBFQOBavElFpfUxwC9ZcqvQErdQXhe+oPFF8BaObupYzVsYEOARzSoZBWmKqaBMHcV0Wf8oG0beIqD+Gdkz0lhyE3NajUW6fhQFSV9Nw/MCBYyofYa0EN7wrBz13eP+Y+J6obWgE8Pdd2JpYD94P77Ezmjj13b0bu5PqPu3EXumEnxEJaEVxSUIHammsra+53z44zt2/m1/bItaeVtQ6dhs3c4XytvW75IYUchMKvEHVUyqmnWBFAS0VJrqSvQde6vp251ux2NtFuKcVOi+oK9YY0M0Cn6o4J6WkvtEK2XJ1vfPGAZxSoK8lb+SxJBbLQx1CohOLndjJUywQWUFmqEi3G6Zaqf/7buOyYJd5IYpfmf0XipfP18pDR9cQCeEuJQI/Lx36bFbVnpBeL2UwmqQw7ApAvf4GeGGQdEbENgolui/wdpjHaYCmPCIPPAmGBIsxfoLUhyRCB0SeCakEBJRKBtfJ+UBbI15TG4PaGBAhWthx8DmFYtHZQujv1CWbLLdzmmUKmHEOWCe1/zdu78bn/+YH+hCOqOzcXfFwuP6OVT/P710crwqGXFrpNaM2GT3MXarw01i15TIi3pmtJXgtbTVGf3h6HKfF+wBAnPyTfdCChudlm5gZaoG//F9pPZsGQcqqbyZN5hBau5OoIJ3PPwjTKDuG4s5MZp2rMzF5PZoK34IT6PIFOPrk+mTiVO5aJH2C+JJRjE/06eoRfpJxa4VgyYaLlaJUv/EhCfATMU/76gEOfmehL/qbJNNHjaFna+CQYB8wvo9PpPFJ5MOrJ1Ix7USBZqBl7KRNOx1d3jex7SG6zuijqCMWRusBsncjZSrM2u82UJmqzpGhvUJN2t6caIM9QQgO9c0t40UROnWsJd2Rbs+nsxpna9u30ttNkjechmzHjEST+X5CkkuNY0GzQkzyFseAf7lSZuLwdh1xSXKvvQJ4g4abTYgPV7uMt3rskohlJmMa82kQkshtyBEIYqQ+YB8X3oRHg7iFKi/bZP+Ao+T6BJhIT/vNPi8ffZs+flk+r2v0WNroZiyWn6xRmadHqTJXsjLJczElAZX6TnJdoWTM1SI2gfutv3rjeBt5t06rVvNuWup29246tlvluO+u2/G92bK9DXheL6uFd/Q3EaRDZqBIAAA==";
final String work = ArgumentApplicationParser.decompressValue(base64CompressedWork);
@@ -413,7 +412,7 @@ public class OrcidClientTest {
}
@Test
@Ignore
@Disabled
void testDownloadedAuthor() throws Exception {
final String base64CompressedWork = "H4sIAAAAAAAAAI2Yy26jMBSG932KiD0hIe1MiwiVZjGLkWbX2XRHsFOsgs3YJmnefszFFy4+mUhtVPz9P/gcH/vQ9PWrrjYXzAVh9Bjst7tgg2nBEKEfx+DP28/wOdgImVOUV4ziY3DDInjNHlKOC8ZRMnxtmlyWxyDaqU+ofg7h/uX7IYwfn+Ngo25ARUKoxJzm1TEopWySKLper1vGC4LU74+IikgTWoFRW+SyfyyfxCBag4iQhBawyoGMDjdqJrnECJAZRquYLDEPaV5jv8oyWlXj+qTiXZLGr7KMiQbnjAOR6IY1W7C6hgIwjGt6SKGfHsY13ajHYipLIcIyJ5Xw6+akdvjEtyt4wxEwM6+VGph5N2zYr2ENhQRhKsmZYChmS1j7nFs6VIBPOwImKhyfMVeFg6GAWEjrcoQ4FoBmBGwVXYhagGHDBIEX+ZzUDiqyn35VN6rJUpUJ4zc/PAI2T03FbrUKJZQszWjV3zavVOjvVfoE01qB+YUUQPGNwHTt3luxJjdqh1AxJFBKLWOrSeCcF13RtxxYtlPOPqH6m+MLwVfoMQ2kdae2ArLajc6fTxkI1nIoegs0yB426pMO+0fSw07xDKMu0XKSde5C2VvrlVMijRzFwqY7XTJI1QMLWcmEzMxtDdxfHiYSgTNJnYJ1K9y5k0tUrMgrnGGaRiuXxxuClulYUbr0nBvpkYLjvgTCGsuSoex3f1CEvRPHKI184NJKtKeaiO7cD5E61bJ4F+9DFd7d01u8Tw6H5BBvvz8f3q3nXLGIeJULGdaqeVBBRK7rS7h/fNvvk/gpedxt4923dxP7Fc3KtKuc1BhlkrfYmeN4dcmrhmbw60+HmWw2CKgbTuqc32CXKTTmeTWT6bDBjPsQ0DTpnchdaYO0ayQ2FyLIiVREqs25aU8VKYLRbK0BsyZuqvr1MU2Sm/rDdhe/2CRN6FU/b+oBVyj1zqRtC5F8kAumfTclsl+s7EoNQu64nfOaVLeezX60Z3XCULLi6GI2IZGTEeey7fec9lBAuXawIHKcpifE7GABHWfoxLVfpUNPBXoMbZWrHFsR3bPAk9J9i2sw9nW6AQT1mpk++7JhW+v44Hmt8PomJqfD13jRnvFOSxCKtu6qHoyBbQ7cMFo750UEfGaXm6bEeplXIXj2hvL6mA7tzvIwmM9pbJFBG834POZdLGi2gH2u9u0K9HMwn5PTioFWLufzmrS4oNuU9Pkt2rf/2jMs7fMdm2rQTTM+j+49AzToAVuXYA1mD2k0+XdE9vAP+JYR5NcQAAA=";
final String work = ArgumentApplicationParser.decompressValue(base64CompressedWork);
@@ -421,7 +420,7 @@ public class OrcidClientTest {
}
@Test
@Ignore
@Disabled
void testDownloadedWork() throws Exception {
final String base64CompressedWork = "H4sIAAAAAAAAANVa63LiOBb+z1Oo+LVbhbkGAlTCLE1Id9IhTQV6unr/CVvB2tiWR5Khmal5rX2BfbE9ki3b3Jzt6Y13h6pQSPrOXTo6knL10zffQxvCBWXBdbVVb1YRCWzm0GB9Xf28vLX6VSQkDhzssYBcV3dEVH8aVa62jL8M1RcKI2kBAYwNLnrtXrMPFCGW7nW10YSPBX8dq3XRb1swNGgomkaG3FBBV9SjcnddDaOVR+0qApUCMaSBJDzA3nXVlTIcNhrb7bbOuE0d+F43AtEwCENBnMjGUhtyjiSFGBqHCkkDu5gqB0rpSMgJsCJOAVmKMVRMuoRbAfbJeaoMY6h84q8gQi4Nz1NlmNQbnDNe4Ak1bLA28/0iB8TjBg1GMV5gdzxu0CGoxSBKlkMkpp44T3eINBxeyG5bKDABpJb7QF1guRpOsd/iOWRRhwSSPlNS5LNjsOHzHAXxmjlHmwBSr3DyTDgsNVLkkAxk6LDjcCIKaBJAtoo2FCagFTJBiyf5IdJwUAv2PJUaNUgXlgnju/PgBJDFKfTYzgdXFgXLYAzVLxH2wPWvrfQ9mKEVhG+oXbD4EsD+3H1txqaxgQwBPqRFIc0w2WoSBHNbLfqIF0zbfVymIbQ52VCyLVIzBRm6VeQVRFWNHuoHDASLeJH3jqDVUQXB5yrOH0ObE5UNLQe+R+1mu2U1u1Z7sGy2hq3esN2tt5oXf79qnELv8fGwkJYPmxSswD1uA6vVXrY7w+5g2G3WuxedjNsJmj2escJx33G/ZXsU5iAs/AyRR0WcjpRXBLglc0lM1BjP59bX1qw9Hn/+dH87/dy9vBikeinKkyzVHjoqJNWIk7QuE3KU6pES6O7MwsarJh44QW1KowcWOCxAC9tlzEPsGX3YrYGQICgS0JKzENach2bEoTYNyKEQzaJyQnzSqesKSaV3IhRx92L8tLAm7GerjbZUujSwlFnIobqKkTuth+Q4ED4Vqqypp5JyfK8ah5Ji0f8AZVSGT2TZVGXfBLw/liOyqdRpJqfyXr8ldyEZrehKkm8Jr/2hc3Qb7EVk9DfMJbU98pu3k+6aETXXBebCZpt23tBaBUfSZRxdo98eYmgNfRxrh3zAnldDM/37FvZ+IiWtoQfddgiaEGBIDGCG7btA7jgBP9svAK2h90l4yYqIGop5jgMHXA4J0NB9ksR+YTX0qFtfqACO01jGjDHFPx552AW2W0P3uvGROk4NLfTvCeNS8X9MaDg1rL9Qz6PYh7En3f4ZNmKS6nUfQYFmE6PYe05IYBqPFGaq5wHlYpaoDbYqxokVK+JBerz51z+BIzc+SfSdTHVrTiSYtZzGFNOdGrr5ohsLF2+NUguqppkDoua6/S6yXwAYu44pM+/HiZ1BwEDWMqYbC5fjZ+MEBwMjb4PRLdTFYWrUwiUhJH/H+G3pMl/7fjqJhTGwSwU5lnfLsVDmxIPvmRetbJeCOsvfaxWXbXWxLVziqNky51BLW1OP2JKzgNoASSa7Gk1WAfrLI9mirzBBIUD1r/W/AgrMla7CjEMOzYBJolo30/mnxd0SzadPt5+eZtMb9O7rEN1wNINgEA8Ha+IxNMdrHLCQRR4TFRCudnmB7m6GqD0YDCqW+lQqlfnndw93iw/TJ/RwN5k+TqZDNJkAQyUvUlWvktjrdgbQEeI1EapN8Grd7MOeYJlfajSxWVOMfcIhVQXgfcFsqhcceobVA/U3GjsbDCYrjVSKSz0wHo8Xym6dArRvvjsbAfUGouFr8s5lG9o72DVVSy1saDqMqlarWW+12r2GiIXXMzuAU6AQcLLqWf3mZRf6iOlsNQdda9BudhQnvNNdPWN8XA7BgU5G2k3pLADA75XD3BSnn3y+3M90SbZWGczkxiRVmfSaJrd0V8u0yG3CeYRyht7O07Ste45weuqNmhcpLO44woEPRq1eilLN/f3ntEqGPFfzi2PmudHTO3EOEKf60LdTyUeDr7KIIzKfTfqtdr896JxklQtbES/IQD7UyL+SZIJSXYhLHkHZ9oqEjPR1MRzWu550cDYdCeI9n+S4hzouUU76+UeCQJ0fjkKn0+v3m703i0Eh/z97BCDH/XAAziTIt4rH94j7s4dHbSY/HJ90e3qriBQL+MMxCGETs9j/QxiSQ5PaS63/QsZqdS8vOxdvtj7Oc//fL4dTI2LvDAfVA6erSDKe3+cPxw70j4c5HHZlfLT9iAEZYKjZkxOYKZxymJy659l/t+QZllC5bvVJrzShD5GN0/NkiaZyqNcJh0NrdngtTfp7wviaHB+SS1Ng7O+Sk3h5HodT4S8RyY78pUmGM6eEg1l8tVCa1KnvY/SgrzDKsxRLF46j+uahNKH3BE6lsIb1lUxpUhdS3WUE+u6nPP/qiyAsklumMhMz9SBNqeus0oQ+QXqwIa7m3qy87IhXnBLPI8kVXXlZMaASm5vAEqWuKYkvHMtbPdiPiIdm6dVmeVMZjX+lfnKDWmaRAT7ev6ctTfhEF3RoWnJeXlKfSXcHcsf69rk0wTd4Qx30RV9yl5et2Ipwqe/SS5MJXiU8vbIv2b/qZaC8PZ65AUwj9QJR3vx1mQ9b7VPy1FFebnSpWq7xi0qJuwA+fLYpL7rwJdLXobcSa97kM4Cl35f3YXmofp0+8R9gBc/XeXL9Vn38pH7mLTs27z9T8ky1n7ynlZ0I4le78rYzl6t/woG5krwQlpcRcLDD2UPkH5F73C9G5tFKfY0q/wa1TIHI0CgAAA==";
final String work = ArgumentApplicationParser.decompressValue(base64CompressedWork);
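The base64CompressedWork fixtures above all start with "H4sIA", the gzip magic number in base64, so decompressValue presumably base64-decodes and then gunzips the payload. A hedged sketch of that round-trip (the real implementation lives in ArgumentApplicationParser):

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.zip.GZIPInputStream;

import org.apache.commons.io.IOUtils;

public class DecompressSketch {

    // base64 -> gzip bytes -> original XML record
    public static String decompressValue(String base64CompressedValue) throws Exception {
        byte[] bytes = Base64.getDecoder().decode(base64CompressedValue);
        try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(bytes))) {
            return IOUtils.toString(gis, StandardCharsets.UTF_8);
        }
    }
}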

View File

@@ -204,6 +204,12 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>eu.dnetlib</groupId>
<artifactId>uoa-validator-engine2</artifactId>
<version>0.9.0</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>