Miriam Baglioni 2021-10-01 12:59:47 +02:00
parent c8321ad31a
commit c4ccd7b32c
22 changed files with 1273 additions and 1118 deletions

View File

@@ -1,3 +1,4 @@
package eu.dnetlib.dhp.oa.dedup;

import java.io.IOException;
@@ -81,10 +82,12 @@ public class SparkWhitelistSimRels extends AbstractSparkAction {
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        // file format: source####target
        Dataset<Tuple2<String, String>> whiteListRels = spark
            .createDataset(
                sc
                    .textFile(whiteListPath)
                    // check if the line is in the correct format: id1####id2
                    .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
                    .map(s -> new Tuple2<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
                    .rdd(),
@@ -99,7 +102,9 @@ public class SparkWhitelistSimRels extends AbstractSparkAction {
        final String outputPath = DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);

        Dataset<Tuple2<String, String>> entities = spark
            .createDataset(
                sc
                    .textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
                    .repartition(numPartitions)
                    .mapToPair(
@@ -112,17 +117,20 @@ public class SparkWhitelistSimRels extends AbstractSparkAction {
        Dataset<Tuple2<String, String>> whiteListRels1 = whiteListRels
            .joinWith(entities, whiteListRels.col("_1").equalTo(entities.col("_1")), "inner")
            .map(
                (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

        Dataset<Tuple2<String, String>> whiteListRels2 = whiteListRels1
            .joinWith(entities, whiteListRels1.col("_2").equalTo(entities.col("_1")), "inner")
            .map(
                (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

        Dataset<Relation> whiteListSimRels = whiteListRels2
            .map(
                (MapFunction<Tuple2<String, String>, Relation>) r -> createSimRel(r._1(), r._2(), entity),
                Encoders.bean(Relation.class));

        saveParquet(whiteListSimRels, outputPath, SaveMode.Append);
    }
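For reference, a minimal standalone sketch of the whitelist parsing applied above: lines must follow the id1####id2 format and are split on the separator into (source, target) pairs. The class name, the sample identifiers, and the use of SimpleEntry instead of scala.Tuple2 are illustrative assumptions, not part of the module.

import java.util.AbstractMap.SimpleEntry;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class WhitelistLineParser {

    // assumed to mirror WHITELIST_SEPARATOR in SparkWhitelistSimRels
    private static final String WHITELIST_SEPARATOR = "####";

    // keeps only well-formed "id1####id2" lines and splits them into (source, target) pairs
    public static List<SimpleEntry<String, String>> parse(Stream<String> lines) {
        return lines
            .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
            .map(s -> new SimpleEntry<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        parse(Stream.of("50|a::1####50|a::2", "malformed-line", "50|b::3####50|b::4"))
            .forEach(r -> System.out.println(r.getKey() + " -> " + r.getValue()));
    }
}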

View File

@@ -171,7 +171,7 @@ public class SparkDedupTest implements Serializable {
        parser
            .parseArgument(
                new String[] {
                    "-i", testGraphBasePath,
                    "-asi", testActionSetId,
                    "-la", "lookupurl",
@@ -226,7 +226,7 @@ public class SparkDedupTest implements Serializable {
        parser
            .parseArgument(
                new String[] {
                    "-i", testGraphBasePath,
                    "-asi", testActionSetId,
                    "-la", "lookupurl",
@@ -257,29 +257,35 @@ public class SparkDedupTest implements Serializable {
            .load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "otherresearchproduct"))
            .count();

        // entities simrels supposed to be equal to the number of previous step (no rels in whitelist)
        assertEquals(3082, orgs_simrel);
        assertEquals(7036, pubs_simrel);
        assertEquals(442, ds_simrel);
        assertEquals(6750, orp_simrel);

        // entities simrels to be different from the number of previous step (new simrels in the whitelist)
        Dataset<Row> sw_simrel = spark
            .read()
            .load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "software"));

        // check if the first relation in the whitelist exists
        assertTrue(
            sw_simrel
                .as(Encoders.bean(Relation.class))
                .toJavaRDD()
                .filter(
                    rel -> rel.getSource().equalsIgnoreCase(whiteList.get(0).split(WHITELIST_SEPARATOR)[0])
                        && rel.getTarget().equalsIgnoreCase(whiteList.get(0).split(WHITELIST_SEPARATOR)[1]))
                .count() > 0);
        // check if the second relation in the whitelist exists
        assertTrue(
            sw_simrel
                .as(Encoders.bean(Relation.class))
                .toJavaRDD()
                .filter(
                    rel -> rel.getSource().equalsIgnoreCase(whiteList.get(1).split(WHITELIST_SEPARATOR)[0])
                        && rel.getTarget().equalsIgnoreCase(whiteList.get(1).split(WHITELIST_SEPARATOR)[1]))
                .count() > 0);

        assertEquals(338, sw_simrel.count());
@@ -298,7 +304,7 @@ public class SparkDedupTest implements Serializable {
        parser
            .parseArgument(
                new String[] {
                    "-i",
                    testGraphBasePath,
                    "-asi",
@@ -394,7 +400,7 @@ public class SparkDedupTest implements Serializable {
        parser
            .parseArgument(
                new String[] {
                    "-i",
                    testGraphBasePath,
                    "-asi",
@@ -449,7 +455,7 @@ public class SparkDedupTest implements Serializable {
                    "/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
        parser
            .parseArgument(
                new String[] {
                    "-i",
                    testGraphBasePath,
                    "-asi",
@@ -496,7 +502,7 @@ public class SparkDedupTest implements Serializable {
                    "/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
        parser
            .parseArgument(
                new String[] {
                    "-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
                });
@@ -612,7 +618,7 @@ public class SparkDedupTest implements Serializable {
                    "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
        parser
            .parseArgument(
                new String[] {
                    "-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
                });

View File

@@ -84,7 +84,8 @@ public class PropagationConstant {
        return di;
    }

    public static Qualifier getQualifier(String inference_class_id, String inference_class_name,
        String qualifierSchema) {
        Qualifier pa = new Qualifier();
        pa.setClassid(inference_class_id);
        pa.setClassname(inference_class_name);
@@ -108,7 +109,11 @@ public class PropagationConstant {
        r.setRelClass(rel_class);
        r.setRelType(rel_type);
        r.setSubRelType(subrel_type);
        r
            .setDataInfo(
                getDataInfo(
                    inference_provenance, inference_class_id, inference_class_name,
                    ModelConstants.DNET_PROVENANCE_ACTIONS));
        return r;
    }

View File

@@ -173,7 +173,10 @@ public class SparkOrcidToResultFromSemRelJob {
            if (toaddpid) {
                StructuredProperty p = new StructuredProperty();
                p.setValue(autoritative_author.getOrcid());
                p
                    .setQualifier(
                        getQualifier(
                            ModelConstants.ORCID_PENDING, ModelConstants.ORCID_CLASSNAME, ModelConstants.DNET_PID_TYPES));
                p
                    .setDataInfo(
                        getDataInfo(

View File

@@ -10,7 +10,6 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
@@ -22,6 +21,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Context;
import eu.dnetlib.dhp.schema.oaf.Result;
import scala.Tuple2;

View File

@@ -7,7 +7,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
@@ -20,6 +19,7 @@ import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.resulttocommunityfromorganization.ResultCommunityList;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import scala.Tuple2;

View File

@@ -6,6 +6,8 @@ import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.*;
@@ -23,8 +25,6 @@ import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.community.Context;
import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
import eu.dnetlib.dhp.schema.oaf.*;

public class ResultMapper implements Serializable {
@@ -278,16 +278,17 @@ public class ResultMapper implements Serializable {
        }

        Optional
            .ofNullable(input.getPid())
            .ifPresent(
                value -> out
                    .setPid(
                        value
                            .stream()
                            .map(
                                p -> ControlledField
                                    .newInstance(p.getQualifier().getClassid(), p.getValue()))
                            .collect(Collectors.toList())));

        oStr = Optional.ofNullable(input.getDateofacceptance());
        if (oStr.isPresent()) {
@@ -298,10 +299,9 @@ public class ResultMapper implements Serializable {
            out.setPublisher(oStr.get().getValue());
        }

        Optional
            .ofNullable(input.getSource())
            .ifPresent(value -> out.setSource(value.stream().map(s -> s.getValue()).collect(Collectors.toList())));
        // value.stream().forEach(s -> sourceList.add(s.getValue())));
        // out.setSource(input.getSource().stream().map(s -> s.getValue()).collect(Collectors.toList()));

        List<Subject> subjectList = new ArrayList<>();
@@ -601,24 +601,36 @@ public class ResultMapper implements Serializable {
    private static Pid getOrcid(List<StructuredProperty> p) {
        List<StructuredProperty> pid_list = p.stream().map(pid -> {
            if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID) ||
                (pid.getQualifier().getClassid().equals(ModelConstants.ORCID_PENDING))) {
                return pid;
            }
            return null;
        }).filter(pid -> pid != null).collect(Collectors.toList());

        if (pid_list.size() == 1) {
            return getAuthorPid(pid_list.get(0));
        }

        List<StructuredProperty> orcid = pid_list
            .stream()
            .filter(
                ap -> ap
                    .getQualifier()
                    .getClassid()
                    .equals(ModelConstants.ORCID))
            .collect(Collectors.toList());
        if (orcid.size() == 1) {
            return getAuthorPid(orcid.get(0));
        }

        orcid = pid_list
            .stream()
            .filter(
                ap -> ap
                    .getQualifier()
                    .getClassid()
                    .equals(ModelConstants.ORCID_PENDING))
            .collect(Collectors.toList());
        if (orcid.size() == 1) {
            return getAuthorPid(orcid.get(0));
        }
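The getOrcid change above is formatting only; the selection logic it preserves can be summarised with the plain-Java sketch below: keep only ORCID-like pids, return a lone match directly, otherwise prefer a single ORCID pid over a single ORCID_PENDING one. The Pid class and the literal classid values are assumptions standing in for StructuredProperty and ModelConstants.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

public class OrcidPicker {

    // assumed classid values; the real ones come from ModelConstants
    private static final String ORCID = "orcid";
    private static final String ORCID_PENDING = "orcid_pending";

    // pid modelled as (classid, value); the real code uses StructuredProperty
    static final class Pid {
        final String classid;
        final String value;

        Pid(String classid, String value) {
            this.classid = classid;
            this.value = value;
        }
    }

    // mirrors the priority in getOrcid: a single ORCID-like pid wins, otherwise prefer ORCID over ORCID_PENDING
    static Optional<Pid> pick(List<Pid> pids) {
        List<Pid> orcidLike = pids
            .stream()
            .filter(p -> ORCID.equals(p.classid) || ORCID_PENDING.equals(p.classid))
            .collect(Collectors.toList());
        if (orcidLike.size() == 1) {
            return Optional.of(orcidLike.get(0));
        }
        for (String classid : Arrays.asList(ORCID, ORCID_PENDING)) {
            List<Pid> match = orcidLike
                .stream()
                .filter(p -> classid.equals(p.classid))
                .collect(Collectors.toList());
            if (match.size() == 1) {
                return Optional.of(match.get(0));
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Optional<Pid> picked = pick(
            Arrays
                .asList(
                    new Pid("orcid", "0000-0001-6651-1178"),
                    new Pid("orcid_pending", "0000-0002-0000-0000")));
        System.out.println(picked.map(p -> p.classid + ":" + p.value).orElse("none"));
    }
}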

View File

@@ -8,8 +8,6 @@ import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
@@ -28,9 +26,11 @@ import org.xml.sax.SAXException;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
import eu.dnetlib.dhp.schema.dump.oaf.community.Validated;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Relation;
@@ -80,7 +80,9 @@ public class SparkPrepareResultProject implements Serializable {
    private static void prepareResultProjectList(SparkSession spark, String inputPath, String outputPath) {
        Dataset<Relation> relation = Utils
            .readPath(spark, inputPath + "/relation", Relation.class)
            .filter(
                "dataInfo.deletedbyinference = false and lower(relClass) = '"
                    + ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");

        Dataset<eu.dnetlib.dhp.schema.oaf.Project> projects = Utils
            .readPath(spark, inputPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);
@@ -159,7 +161,7 @@ public class SparkPrepareResultProject implements Serializable {
                    provenance.setTrust(di.get().getTrust());
                    p.setProvenance(provenance);
                }
                if (relation.getValidated()) {
                    p.setValidated(Validated.newInstance(relation.getValidated(), relation.getValidationDate()));
                }
                return p;
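As a side note on the filter being re-wrapped above, a tiny standalone sketch of the resulting Spark SQL condition; the literal "isProducedBy" value is an assumption standing in for ModelConstants.IS_PRODUCED_BY.

public class RelationFilterExample {

    // assumed value; in the project it comes from ModelConstants.IS_PRODUCED_BY
    private static final String IS_PRODUCED_BY = "isProducedBy";

    // builds the condition used to keep only live "result isProducedBy project" relations
    static String producedByFilter() {
        return "dataInfo.deletedbyinference = false and lower(relClass) = '"
            + IS_PRODUCED_BY.toLowerCase() + "'";
    }

    public static void main(String[] args) {
        // prints: dataInfo.deletedbyinference = false and lower(relClass) = 'isproducedby'
        System.out.println(producedByFilter());
    }
}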

View File

@@ -9,7 +9,6 @@ import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.ForeachFunction;
@@ -23,6 +22,8 @@ import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelSupport;
@@ -453,18 +454,20 @@ public class DumpGraphEntities implements Serializable {
    private static <E extends OafEntity> void organizationMap(SparkSession spark, String inputPath, String outputPath,
        Class<E> inputClazz) {
        Utils
            .readPath(spark, inputPath, inputClazz)
            .map(
                (MapFunction<E, Organization>) o -> mapOrganization((eu.dnetlib.dhp.schema.oaf.Organization) o),
                Encoders.bean(Organization.class))
            .filter((FilterFunction<Organization>) o -> o != null)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(outputPath);
    }

    private static eu.dnetlib.dhp.schema.dump.oaf.graph.Organization mapOrganization(
        eu.dnetlib.dhp.schema.oaf.Organization org) {
        if (org.getDataInfo().getDeletedbyinference())
            return null;

        Organization organization = new Organization();

View File

@@ -5,8 +5,6 @@ import java.io.StringReader;
import java.util.*;
import java.util.function.Consumer;

import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
@@ -15,6 +13,8 @@ import org.dom4j.io.SAXReader;
import org.jetbrains.annotations.NotNull;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -140,7 +140,7 @@ public class QueryInformationSystem {
    }

    private String makeOpenaireId(Node el, String prefix) {
        if (!prefix.equals(ModelSupport.entityIdPrefix.get("project"))) {
            return null;
        }
        String funder = null;

View File

@@ -107,7 +107,7 @@ public class SparkDumpRelationJob implements Serializable {
                }
            }
        }
        if (relation.getValidated()) {
            rel_new.setValidated(relation.getValidated());
            rel_new.setValidationDate(relation.getValidationDate());
        }

View File

@@ -78,7 +78,6 @@ public class SparkDumpFunderResults implements Serializable {
            .union(Utils.readPath(spark, inputPath + "/orp", CommunityResult.class))
            .union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));

        List<String> funderList = project
            .select("id")
            .map((MapFunction<Row, String>) value -> value.getString(0).substring(0, 15), Encoders.STRING())

View File

@@ -80,7 +80,6 @@ public class SparkResultLinkedToProject implements Serializable {
    private static <R extends Result> void writeResultsLinkedToProjects(SparkSession spark, Class<R> inputClazz,
        String inputPath, String outputPath, String graphPath) {

        Dataset<R> results = Utils
            .readPath(spark, inputPath, inputClazz)
            .filter("dataInfo.deletedbyinference = false and datainfo.invisible = false");

View File

@@ -7,17 +7,10 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
@@ -25,10 +18,14 @@ import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.Instance;
import eu.dnetlib.dhp.schema.dump.oaf.OpenAccessRoute;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
import eu.dnetlib.dhp.schema.oaf.Dataset;
@@ -145,7 +142,7 @@ public class DumpJobTest {
    }

    @Test
    public void testPublicationDump() {

        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/publication_extendedinstance")
            .getPath();
@@ -174,41 +171,92 @@ public class DumpJobTest {
        GraphResult gr = verificationDataset.first();

        Assertions.assertEquals(2, gr.getMeasures().size());
        Assertions
            .assertTrue(
                gr
                    .getMeasures()
                    .stream()
                    .anyMatch(
                        m -> m.getKey().equals("influence")
                            && m.getValue().equals("1.62759106106e-08")));
        Assertions
            .assertTrue(
                gr
                    .getMeasures()
                    .stream()
                    .anyMatch(
                        m -> m.getKey().equals("popularity")
                            && m.getValue().equals("0.22519296")));

        Assertions.assertEquals(6, gr.getAuthor().size());
        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Nikolaidou,Charitini") &&
                            a.getName().equals("Charitini") && a.getSurname().equals("Nikolaidou")
                            && a.getRank() == 1 && a.getPid() == null));

        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Votsi,Nefta") &&
                            a.getName().equals("Nefta") && a.getSurname().equals("Votsi")
                            && a.getRank() == 2 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
                            && a.getPid().getProvenance() != null));

        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Sgardelis,Steanos") &&
                            a.getName().equals("Steanos") && a.getSurname().equals("Sgardelis")
                            && a.getRank() == 3 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
                            && a.getPid().getProvenance() != null));

        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Halley,John") &&
                            a.getName().equals("John") && a.getSurname().equals("Halley")
                            && a.getRank() == 4 && a.getPid() == null));

        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Pantis,John") &&
                            a.getName().equals("John") && a.getSurname().equals("Pantis")
                            && a.getRank() == 5 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
                            && a.getPid().getProvenance() != null));

        Assertions
            .assertTrue(
                gr
                    .getAuthor()
                    .stream()
                    .anyMatch(
                        a -> a.getFullname().equals("Tsiafouli,Maria") &&
                            a.getName().equals("Maria") && a.getSurname().equals("Tsiafouli")
                            && a.getRank() == 6 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
                            && a.getPid().getProvenance() != null));

        Assertions.assertEquals("publication", gr.getType());
@@ -216,27 +264,52 @@ public class DumpJobTest {
        Assertions.assertEquals("English", gr.getLanguage().getLabel());

        Assertions.assertEquals(1, gr.getCountry().size());
        Assertions.assertEquals("IT", gr.getCountry().get(0).getCode());
        Assertions.assertEquals("Italy", gr.getCountry().get(0).getLabel());
        Assertions.assertTrue(gr.getCountry().get(0).getProvenance() == null);

        Assertions.assertEquals(12, gr.getSubjects().size());
        Assertions
            .assertTrue(
                gr
                    .getSubjects()
                    .stream()
                    .anyMatch(
                        s -> s.getSubject().getValue().equals("Ecosystem Services hotspots")
                            && s.getSubject().getScheme().equals("ACM") && s.getProvenance() != null &&
                            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));
        Assertions
            .assertTrue(
                gr
                    .getSubjects()
                    .stream()
                    .anyMatch(
                        s -> s.getSubject().getValue().equals("Natura 2000")
                            && s.getSubject().getScheme().equals("") && s.getProvenance() != null &&
                            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));

        Assertions
            .assertEquals(
                "Ecosystem Service capacity is higher in areas of multiple designation types",
                gr.getMaintitle());

        Assertions.assertEquals(null, gr.getSubtitle());

        Assertions.assertEquals(1, gr.getDescription().size());
        Assertions
            .assertTrue(
                gr
                    .getDescription()
                    .get(0)
                    .startsWith("The implementation of the Ecosystem Service (ES) concept into practice"));
        Assertions
            .assertTrue(
                gr
                    .getDescription()
                    .get(0)
                    .endsWith(
                        "start complying with new standards and demands for nature conservation and environmental management."));

        Assertions.assertEquals("2017-01-01", gr.getPublicationdate());
@@ -255,7 +328,9 @@ public class DumpJobTest {
        Assertions.assertEquals(0, gr.getCoverage().size());

        Assertions.assertEquals(ModelConstants.ACCESS_RIGHT_OPEN, gr.getBestaccessright().getLabel());
        Assertions
            .assertEquals(
                Constants.accessRightsCoarMap.get(ModelConstants.ACCESS_RIGHT_OPEN), gr.getBestaccessright().getCode());
        Assertions.assertEquals(null, gr.getBestaccessright().getOpenAccessRoute());

        Assertions.assertEquals("One Ecosystem", gr.getContainer().getName());
@@ -284,11 +359,15 @@ public class DumpJobTest {
        Assertions.assertEquals("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2", gr.getId());

        Assertions.assertEquals(2, gr.getOriginalId().size());
        Assertions
            .assertTrue(
                gr.getOriginalId().contains("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2")
                    && gr.getOriginalId().contains("10.3897/oneeco.2.e13718"));

        Assertions.assertEquals(1, gr.getPid().size());
        Assertions
            .assertTrue(
                gr.getPid().get(0).getScheme().equals("doi")
                    && gr.getPid().get(0).getValue().equals("10.1016/j.triboint.2014.05.004"));

        Assertions.assertEquals("2020-03-23T00:20:51.392Z", gr.getDateofcollection());
@@ -298,25 +377,34 @@ public class DumpJobTest {
        Instance instance = gr.getInstance().get(0);
        Assertions.assertEquals(0, instance.getPid().size());
        Assertions.assertEquals(1, instance.getAlternateIdentifier().size());
        Assertions
            .assertTrue(
                instance.getAlternateIdentifier().get(0).getScheme().equals("doi")
                    && instance.getAlternateIdentifier().get(0).getValue().equals("10.3897/oneeco.2.e13718"));
        Assertions.assertEquals(null, instance.getLicense());
        Assertions
            .assertTrue(
                instance
                    .getAccessright()
                    .getCode()
                    .equals(
                        Constants.accessRightsCoarMap
                            .get(ModelConstants.ACCESS_RIGHT_OPEN)));
        Assertions.assertTrue(instance.getAccessright().getLabel().equals(ModelConstants.ACCESS_RIGHT_OPEN));
        Assertions.assertTrue(instance.getAccessright().getOpenAccessRoute().equals(OpenAccessRoute.green));
        Assertions.assertTrue(instance.getType().equals("Article"));
        Assertions.assertEquals(2, instance.getUrl().size());
        Assertions
            .assertTrue(
                instance.getUrl().contains("https://doi.org/10.3897/oneeco.2.e13718")
                    && instance.getUrl().contains("https://oneecosystem.pensoft.net/article/13718/"));
        Assertions.assertEquals("2017-01-01", instance.getPublicationdate());
        Assertions.assertEquals(null, instance.getArticleprocessingcharge());
        Assertions.assertEquals("peerReviewed", instance.getRefereed());
    }

    @Test
    public void testDatasetDump() {
        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/dataset_extendedinstance")
            .getPath();
@@ -327,7 +415,8 @@ public class DumpJobTest {
        DumpProducts dump = new DumpProducts();
        dump
            .run(
                false, sourcePath, workingDir.toString() + "/result",
                communityMapPath, Dataset.class,
                GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
@@ -344,7 +433,7 @@ public class DumpJobTest {
        Assertions.assertEquals(1, verificationDataset.filter("type = 'dataset'").count());

        // the common fields in the result have been already checked. Now checking only
        // community specific fields

        GraphResult gr = verificationDataset.first();
@@ -353,10 +442,33 @@ public class DumpJobTest {
        Assertions.assertEquals(2, gr.getGeolocation().stream().filter(gl -> gl.getBox().equals("")).count());
        Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPlace().equals("")).count());
        Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("")).count());
        Assertions
            .assertEquals(
                1,
                gr
                    .getGeolocation()
                    .stream()
                    .filter(gl -> gl.getPlace().equals("18 York St, Ottawa, ON K1N 5S6; Ottawa; Ontario; Canada"))
                    .count());
        Assertions
            .assertEquals(
                1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("45.427242 -75.693904")).count());
        Assertions
            .assertEquals(
                1,
                gr
                    .getGeolocation()
                    .stream()
                    .filter(gl -> gl.getPoint().equals("") && !gl.getPlace().equals(""))
                    .count());
        Assertions
            .assertEquals(
                1,
                gr
                    .getGeolocation()
                    .stream()
                    .filter(gl -> !gl.getPoint().equals("") && gl.getPlace().equals(""))
                    .count());

        Assertions.assertEquals("1024Gb", gr.getSize());
@@ -373,7 +485,7 @@ public class DumpJobTest {
    }

    @Test
    public void testSoftwareDump() {
        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/software_extendedinstance")
            .getPath();
@@ -384,13 +496,13 @@ public class DumpJobTest {
        DumpProducts dump = new DumpProducts();
        dump
            .run(
                false, sourcePath, workingDir.toString() + "/result",
                communityMapPath, Software.class,
                GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());

        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        JavaRDD<GraphResult> tmp = sc
            .textFile(workingDir.toString() + "/result")
            .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));
@@ -412,7 +524,6 @@ public class DumpJobTest {
        Assertions.assertEquals("perl", gr.getProgrammingLanguage());

        Assertions.assertEquals(null, gr.getContainer());

        Assertions.assertEquals(null, gr.getContactperson());
        Assertions.assertEquals(null, gr.getContactgroup());
@@ -424,7 +535,7 @@ public class DumpJobTest {
    }

    @Test
    public void testOrpDump() {
        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/orp_extendedinstance")
            .getPath();
@@ -435,13 +546,13 @@ public class DumpJobTest {
        DumpProducts dump = new DumpProducts();
        dump
            .run(
                false, sourcePath, workingDir.toString() + "/result",
                communityMapPath, OtherResearchProduct.class,
                GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());

        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        JavaRDD<GraphResult> tmp = sc
            .textFile(workingDir.toString() + "/result")
            .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));
@@ -466,7 +577,6 @@ public class DumpJobTest {
        Assertions.assertTrue(gr.getTool().contains("tool1"));
        Assertions.assertTrue(gr.getTool().contains("tool2"));

        Assertions.assertEquals(null, gr.getContainer());

        Assertions.assertEquals(null, gr.getDocumentationUrl());
        Assertions.assertEquals(null, gr.getCodeRepositoryUrl());
@@ -490,7 +600,8 @@ public class DumpJobTest {
        DumpProducts dump = new DumpProducts();
        dump
            .run(
                false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
                CommunityResult.class, Constants.DUMPTYPE.COMMUNITY.getType());

        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -506,7 +617,7 @@ public class DumpJobTest {
        Assertions.assertEquals(1, verificationDataset.filter("type = 'publication'").count());

        // the common fields in the result have been already checked. Now checking only
        // community specific fields

        CommunityResult cr = verificationDataset.first();
@@ -519,15 +630,20 @@ public class DumpJobTest {
        Assertions.assertEquals("0.9", cr.getContext().get(0).getProvenance().get(0).getTrust());

        Assertions.assertEquals(1, cr.getCollectedfrom().size());
        Assertions
            .assertEquals("10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getCollectedfrom().get(0).getKey());
        Assertions.assertEquals("Pensoft", cr.getCollectedfrom().get(0).getValue());

        Assertions.assertEquals(1, cr.getInstance().size());
        Assertions
            .assertEquals(
                "10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db",
                cr.getInstance().get(0).getCollectedfrom().getKey());
        Assertions.assertEquals("Pensoft", cr.getInstance().get(0).getCollectedfrom().getValue());
        Assertions
            .assertEquals(
                "10|openaire____::e707e544b9a5bd23fc27fbfa65eb60dd", cr.getInstance().get(0).getHostedby().getKey());
        Assertions.assertEquals("One Ecosystem", cr.getInstance().get(0).getHostedby().getValue());
    }
@@ -587,8 +703,6 @@ public class DumpJobTest {
        Assertions.assertTrue(verificationDataset.filter("type = 'dataset'").count() == 90);
    }

    @Test
@@ -650,7 +764,6 @@ public class DumpJobTest {
        Assertions.assertEquals(0, verificationDataset.count());
    }

    @Test
@@ -718,7 +831,6 @@ public class DumpJobTest {
        Assertions.assertEquals(6, verificationDataset.filter("type = 'software'").count());
    }

    @Test
@@ -814,7 +926,6 @@ public class DumpJobTest {
        Assertions.assertEquals(23, verificationDataset.count());
        Assertions.assertEquals(23, verificationDataset.filter("type = 'publication'").count());

        verificationDataset.createOrReplaceTempView("check");
@@ -832,7 +943,6 @@ public class DumpJobTest {
        Assertions.assertTrue(temp.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);
    }
}

View File

@@ -8,16 +8,17 @@ import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
@@ -247,50 +248,45 @@ public class PrepareResultProjectJobTest {
        org.apache.spark.sql.Dataset<ResultProject> verificationDataset = spark
            .createDataset(tmp.rdd(), Encoders.bean(ResultProject.class));

        assertEquals(2, verificationDataset.count());

        assertEquals(
            1,
            verificationDataset.filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'").count());
        assertEquals(
            1,
            verificationDataset.filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'").count());

        verificationDataset.createOrReplaceTempView("dataset");

        String query = "select resultId, MyT.id project , MyT.title title, MyT.acronym acronym , MyT.provenance.provenance provenance, "
            +
            "MyT.validated.validatedByFunder, MyT.validated.validationDate "
            + "from dataset "
            + "lateral view explode(projectsList) p as MyT ";

        org.apache.spark.sql.Dataset<Row> resultExplodedProvenance = spark.sql(query);
        assertEquals(3, resultExplodedProvenance.count());
        assertEquals(3, resultExplodedProvenance.filter("validatedByFunder = true").count());
        assertEquals(
            2,
            resultExplodedProvenance
                .filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'")
                .count());

        assertEquals(
            1,
            resultExplodedProvenance
                .filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'")
                .count());

        assertEquals(
            2,
            resultExplodedProvenance
                .filter("project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6'")
                .count());

        assertEquals(
            1,
            resultExplodedProvenance
                .filter(
@@ -300,8 +296,7 @@ public class PrepareResultProjectJobTest {
                    "and validationDate = '2021-08-06'")
                .count());

        assertEquals(
            1,
            resultExplodedProvenance
                .filter(
@@ -310,15 +305,13 @@ public class PrepareResultProjectJobTest {
                    "and validatedByFunder = true and validationDate = '2021-08-04'")
                .count());

        assertEquals(
            1,
            resultExplodedProvenance
                .filter("project = '40|aka_________::03376222b28a3aebf2730ac514818d04'")
                .count());

        assertEquals(
            1,
            resultExplodedProvenance
                .filter(
@@ -327,8 +320,7 @@ public class PrepareResultProjectJobTest {
                    "and validatedByFunder = true and validationDate = '2021-08-05'")
                .count());

        assertEquals(
            3, resultExplodedProvenance.filter("provenance = 'sysimport:crosswalk:entityregistry'").count());
    }

View File

@@ -8,8 +8,6 @@ import java.util.HashMap;
import java.util.logging.Filter;
import java.util.stream.Collectors;

-import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
@@ -31,6 +29,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo;
import eu.dnetlib.dhp.schema.dump.oaf.Result;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Project;

public class UpdateProjectInfoTest {
@@ -142,7 +142,7 @@ public class UpdateProjectInfoTest {
    }

    @Test
-   public void testValidatedRelation() throws Exception{
+   public void testValidatedRelation() throws Exception {
        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/addProjectInfo")
            .getPath();
@@ -199,8 +199,15 @@ public class UpdateProjectInfoTest {
            .count());

        Project project = verificationDataset
-           .map((MapFunction<CommunityResult, Project>) cr -> cr.getProjects().stream().filter(p -> p.getValidated() != null).collect(Collectors.toList()).get(0)
-               , Encoders.bean(Project.class)).first();
+           .map(
+               (MapFunction<CommunityResult, Project>) cr -> cr
+                   .getProjects()
+                   .stream()
+                   .filter(p -> p.getValidated() != null)
+                   .collect(Collectors.toList())
+                   .get(0),
+               Encoders.bean(Project.class))
+           .first();

        Assertions.assertTrue(project.getFunder().getName().equals("Academy of Finland"));
        Assertions.assertTrue(project.getFunder().getShortName().equals("AKA"));
@@ -208,18 +215,22 @@ public class UpdateProjectInfoTest {
        Assertions.assertTrue(project.getFunder().getFundingStream() == null);
        Assertions.assertTrue(project.getValidated().getValidationDate().equals("2021-08-06"));

        project = verificationDataset
-           .map((MapFunction<CommunityResult, Project>) cr -> cr.getProjects().stream().filter(p -> p.getValidated() == null).collect(Collectors.toList()).get(0)
-               , Encoders.bean(Project.class)).first();
+           .map(
+               (MapFunction<CommunityResult, Project>) cr -> cr
+                   .getProjects()
+                   .stream()
+                   .filter(p -> p.getValidated() == null)
+                   .collect(Collectors.toList())
+                   .get(0),
+               Encoders.bean(Project.class))
+           .first();

        Assertions.assertTrue(project.getFunder().getName().equals("European Commission"));
        Assertions.assertTrue(project.getFunder().getShortName().equals("EC"));
        Assertions.assertTrue(project.getFunder().getJurisdiction().equals("EU"));
        Assertions.assertTrue(project.getFunder().getFundingStream().equals("H2020"));
    }
}
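
The reformatted map calls above use Spark's typed Dataset API: a MapFunction that digs the wanted Project out of each CommunityResult, paired with Encoders.bean for the produced type. A stripped-down sketch of that pattern, assuming hypothetical Wrapper/Item beans instead of the dump model classes:

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class MapToNestedBeanSketch {

    // hypothetical beans: a wrapper holding a list of items, one of which is flagged
    public static class Item implements Serializable {
        private String name;
        private Boolean flagged;

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public Boolean getFlagged() { return flagged; }
        public void setFlagged(Boolean flagged) { this.flagged = flagged; }
    }

    public static class Wrapper implements Serializable {
        private List<Item> items;

        public List<Item> getItems() { return items; }
        public void setItems(List<Item> items) { this.items = items; }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("map-sketch").getOrCreate();

        Item flagged = new Item();
        flagged.setName("validated");
        flagged.setFlagged(true);

        Item plain = new Item();
        plain.setName("plain");

        Wrapper wrapper = new Wrapper();
        wrapper.setItems(Arrays.asList(flagged, plain));

        Dataset<Wrapper> wrappers = spark.createDataset(Arrays.asList(wrapper), Encoders.bean(Wrapper.class));

        // same shape as the test: map each wrapper to the first nested item matching a predicate
        Item first = wrappers
            .map(
                (MapFunction<Wrapper, Item>) w -> w
                    .getItems()
                    .stream()
                    .filter(i -> Boolean.TRUE.equals(i.getFlagged()))
                    .collect(Collectors.toList())
                    .get(0),
                Encoders.bean(Item.class))
            .first();

        System.out.println(first.getName());

        spark.stop();
    }
}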

View File

@@ -94,7 +94,8 @@ public class DumpRelationTest {
        verificationDataset.createOrReplaceTempView("table");

-       verificationDataset.foreach((ForeachFunction<Relation>)r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+       verificationDataset
+           .foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

        Dataset<Row> check = spark
            .sql(
@@ -156,7 +157,8 @@ public class DumpRelationTest {
        verificationDataset.createOrReplaceTempView("table");

-       verificationDataset.foreach((ForeachFunction<Relation>)r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+       verificationDataset
+           .foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

        Dataset<Row> check = spark
            .sql(
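
The debug statement above walks a typed Dataset with a ForeachFunction and prints every record as JSON through Jackson, which is handy for eyeballing small test datasets. A minimal sketch of that idiom, assuming a hypothetical Rel bean in place of the dump Relation class:

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ForeachJsonDumpSketch {

    // hypothetical bean standing in for the dump Relation
    public static class Rel implements Serializable {
        private String source;
        private String target;

        public String getSource() { return source; }
        public void setSource(String source) { this.source = source; }
        public String getTarget() { return target; }
        public void setTarget(String target) { this.target = target; }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("foreach-json").getOrCreate();

        Rel rel = new Rel();
        rel.setSource("source-1");
        rel.setTarget("target-1");

        Dataset<Rel> rels = spark.createDataset(Arrays.asList(rel), Encoders.bean(Rel.class));

        // ForeachFunction.call declares throws Exception, so Jackson's checked exception is allowed in the lambda;
        // the output goes to executor stdout (driver stdout in local mode)
        rels.foreach((ForeachFunction<Rel>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

        spark.stop();
    }
}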

View File

@@ -6,7 +6,6 @@ import static org.mockito.Mockito.lenient;
import java.util.*;
import java.util.function.Consumer;

-import eu.dnetlib.dhp.schema.common.ModelSupport;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -14,6 +13,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

+import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -529,7 +529,8 @@ class QueryInformationSystemTest {
        List<ContextInfo> cInfoList = new ArrayList<>();
        final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
        queryInformationSystem.execContextRelationQuery();
-       queryInformationSystem.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));
+       queryInformationSystem
+           .getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));

        Assertions.assertEquals(5, cInfoList.size());
    }
@@ -540,7 +541,8 @@ class QueryInformationSystemTest {
        List<ContextInfo> cInfoList = new ArrayList<>();
        final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
        queryInformationSystem.execContextRelationQuery();
-       queryInformationSystem.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));
+       queryInformationSystem
+           .getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));

        cInfoList.forEach(contextInfo -> {
            switch (contextInfo.getId()) {

View File

@@ -84,7 +84,8 @@ public class IndexRecordTransformerTest {
    @Test
    public void testForEOSCFutureTraining() throws IOException, TransformerException {
-       final String record = IOUtils.toString(getClass().getResourceAsStream("eosc-future/training-notebooks-seadatanet.xml"));
+       final String record = IOUtils
+           .toString(getClass().getResourceAsStream("eosc-future/training-notebooks-seadatanet.xml"));
        testRecordTransformation(record);
    }
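
The wrapped call above reads a test record straight from the classpath, resolving the path relative to the test class's package. As a side note, the charset-less IOUtils.toString(InputStream) overload is deprecated in recent commons-io releases; a hedged sketch of the explicit-charset variant, assuming the same resource layout on the classpath:

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Objects;

import org.apache.commons.io.IOUtils;

public class ResourceReadSketch {

    public static void main(String[] args) throws IOException {
        // path is resolved relative to this class's package, as in the test; the resource must exist on the classpath
        try (InputStream in = Objects
            .requireNonNull(
                ResourceReadSketch.class.getResourceAsStream("eosc-future/training-notebooks-seadatanet.xml"),
                "resource not found on classpath")) {
            // explicit charset avoids the deprecated platform-default overload
            String record = IOUtils.toString(in, StandardCharsets.UTF_8);
            System.out.println(record.length());
        }
    }
}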

View File

@@ -753,7 +753,7 @@
        <mockito-core.version>3.3.3</mockito-core.version>
        <mongodb.driver.version>3.4.2</mongodb.driver.version>
        <vtd.version>[2.12,3.0)</vtd.version>
-       <dhp-schemas.version>[2.7.18]</dhp-schemas.version>
+       <dhp-schemas.version>[2.7.19]</dhp-schemas.version>
        <dnet-actionmanager-api.version>[4.0.3]</dnet-actionmanager-api.version>
        <dnet-actionmanager-common.version>[6.0.5]</dnet-actionmanager-common.version>
        <dnet-openaire-broker-common.version>[3.1.6]</dnet-openaire-broker-common.version>