forked from D-Net/dnet-hadoop
This commit is contained in:
parent c8321ad31a
commit c4ccd7b32c
@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.oa.dedup;
 
 import java.io.IOException;
@@ -35,117 +36,124 @@ import scala.Tuple3;
 public class SparkWhitelistSimRels extends AbstractSparkAction {
 
     private static final Logger log = LoggerFactory.getLogger(SparkCreateSimRels.class);
 
     private static final String WHITELIST_SEPARATOR = "####";
 
     public SparkWhitelistSimRels(ArgumentApplicationParser parser, SparkSession spark) {
         super(parser, spark);
     }
 
     public static void main(String[] args) throws Exception {
         ArgumentApplicationParser parser = new ArgumentApplicationParser(
             IOUtils
                 .toString(
                     SparkCreateSimRels.class
                         .getResourceAsStream(
                             "/eu/dnetlib/dhp/oa/dedup/whitelistSimRels_parameters.json")));
         parser.parseArgument(args);
 
         SparkConf conf = new SparkConf();
         new SparkWhitelistSimRels(parser, getSparkSession(conf))
             .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
     }
 
     @Override
     public void run(ISLookUpService isLookUpService)
         throws DocumentException, IOException, ISLookUpException, SAXException {
 
         // read oozie parameters
         final String graphBasePath = parser.get("graphBasePath");
         final String isLookUpUrl = parser.get("isLookUpUrl");
         final String actionSetId = parser.get("actionSetId");
         final String workingPath = parser.get("workingPath");
         final int numPartitions = Optional
             .ofNullable(parser.get("numPartitions"))
             .map(Integer::valueOf)
             .orElse(NUM_PARTITIONS);
         final String whiteListPath = parser.get("whiteListPath");
 
         log.info("numPartitions: '{}'", numPartitions);
         log.info("graphBasePath: '{}'", graphBasePath);
         log.info("isLookUpUrl: '{}'", isLookUpUrl);
         log.info("actionSetId: '{}'", actionSetId);
         log.info("workingPath: '{}'", workingPath);
         log.info("whiteListPath: '{}'", whiteListPath);
 
         JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
-        //file format: source####target
-        Dataset<Tuple2<String, String>> whiteListRels = spark.createDataset(sc
-            .textFile(whiteListPath)
-            //check if the line is in the correct format: id1####id2
-            .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
-            .map(s -> new Tuple2<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
-            .rdd(),
-            Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+        // file format: source####target
+        Dataset<Tuple2<String, String>> whiteListRels = spark
+            .createDataset(
+                sc
+                    .textFile(whiteListPath)
+                    // check if the line is in the correct format: id1####id2
+                    .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
+                    .map(s -> new Tuple2<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
+                    .rdd(),
+                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
         // for each dedup configuration
         for (DedupConfig dedupConf : getConfigurations(isLookUpService, actionSetId)) {
 
             final String entity = dedupConf.getWf().getEntityType();
             final String subEntity = dedupConf.getWf().getSubEntityValue();
             log.info("Adding whitelist simrels for: '{}'", subEntity);
 
             final String outputPath = DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);
 
-            Dataset<Tuple2<String, String>> entities = spark.createDataset(sc
-                .textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
-                .repartition(numPartitions)
-                .mapToPair(
-                    (PairFunction<String, String, String>) s -> {
-                        MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
-                        return new Tuple2<>(d.getIdentifier(), "present");
-                    })
-                .rdd(),
-                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+            Dataset<Tuple2<String, String>> entities = spark
+                .createDataset(
+                    sc
+                        .textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
+                        .repartition(numPartitions)
+                        .mapToPair(
+                            (PairFunction<String, String, String>) s -> {
+                                MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
+                                return new Tuple2<>(d.getIdentifier(), "present");
+                            })
+                        .rdd(),
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Tuple2<String, String>> whiteListRels1 = whiteListRels
                 .joinWith(entities, whiteListRels.col("_1").equalTo(entities.col("_1")), "inner")
-                .map((MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+                .map(
+                    (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Tuple2<String, String>> whiteListRels2 = whiteListRels1
                 .joinWith(entities, whiteListRels1.col("_2").equalTo(entities.col("_1")), "inner")
-                .map((MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+                .map(
+                    (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Relation> whiteListSimRels = whiteListRels2
-                .map((MapFunction<Tuple2<String, String>, Relation>)
-                    r -> createSimRel(r._1(), r._2(), entity),
-                    Encoders.bean(Relation.class)
-                );
+                .map(
+                    (MapFunction<Tuple2<String, String>, Relation>) r -> createSimRel(r._1(), r._2(), entity),
+                    Encoders.bean(Relation.class));
 
             saveParquet(whiteListSimRels, outputPath, SaveMode.Append);
         }
     }
 
     private Relation createSimRel(String source, String target, String entity) {
         final Relation r = new Relation();
         r.setSource(source);
         r.setTarget(target);
         r.setSubRelType("dedupSimilarity");
         r.setRelClass("isSimilarTo");
         r.setDataInfo(new DataInfo());
 
         switch (entity) {
             case "result":
                 r.setRelType("resultResult");
                 break;
             case "organization":
                 r.setRelType("organizationOrganization");
                 break;
             default:
                 throw new IllegalArgumentException("unmanaged entity type: " + entity);
         }
         return r;
     }
 }
(File diff suppressed because it is too large.)
@@ -69,7 +69,7 @@ public class PropagationConstant
                 PROPAGATION_DATA_INFO_TYPE,
                 PROPAGATION_COUNTRY_INSTREPO_CLASS_ID,
                 PROPAGATION_COUNTRY_INSTREPO_CLASS_NAME,
                 ModelConstants.DNET_PROVENANCE_ACTIONS));
         return nc;
     }
 
@@ -84,7 +84,8 @@ public class PropagationConstant
         return di;
     }
 
-    public static Qualifier getQualifier(String inference_class_id, String inference_class_name, String qualifierSchema) {
+    public static Qualifier getQualifier(String inference_class_id, String inference_class_name,
+        String qualifierSchema) {
         Qualifier pa = new Qualifier();
         pa.setClassid(inference_class_id);
         pa.setClassname(inference_class_name);
@@ -108,7 +109,11 @@ public class PropagationConstant
         r.setRelClass(rel_class);
         r.setRelType(rel_type);
         r.setSubRelType(subrel_type);
-        r.setDataInfo(getDataInfo(inference_provenance, inference_class_id, inference_class_name, ModelConstants.DNET_PROVENANCE_ACTIONS));
+        r
+            .setDataInfo(
+                getDataInfo(
+                    inference_provenance, inference_class_id, inference_class_name,
+                    ModelConstants.DNET_PROVENANCE_ACTIONS));
         return r;
     }
 
@@ -173,14 +173,17 @@ public class SparkOrcidToResultFromSemRelJob
         if (toaddpid) {
             StructuredProperty p = new StructuredProperty();
             p.setValue(autoritative_author.getOrcid());
-            p.setQualifier(getQualifier(ModelConstants.ORCID_PENDING, ModelConstants.ORCID_CLASSNAME, ModelConstants.DNET_PID_TYPES));
+            p
+                .setQualifier(
+                    getQualifier(
+                        ModelConstants.ORCID_PENDING, ModelConstants.ORCID_CLASSNAME, ModelConstants.DNET_PID_TYPES));
             p
                 .setDataInfo(
                     getDataInfo(
                         PROPAGATION_DATA_INFO_TYPE,
                         PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_ID,
                         PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_NAME,
                         ModelConstants.DNET_PROVENANCE_ACTIONS));
 
             Optional<List<StructuredProperty>> authorPid = Optional.ofNullable(author.getPid());
             if (authorPid.isPresent()) {
@@ -10,7 +10,6 @@ import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -22,6 +21,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import scala.Tuple2;
@@ -130,7 +130,7 @@ public class SparkResultToCommunityFromOrganizationJob
                         PROPAGATION_DATA_INFO_TYPE,
                         PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_ID,
                         PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME,
                         ModelConstants.DNET_PROVENANCE_ACTIONS)));
                 propagatedContexts.add(newContext);
             }
         }
@@ -7,7 +7,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import java.util.*;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -20,6 +19,7 @@ import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.resulttocommunityfromorganization.ResultCommunityList;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.*;
 import scala.Tuple2;
 
@@ -126,7 +126,7 @@ public class SparkResultToCommunityThroughSemRelJob
                         PROPAGATION_DATA_INFO_TYPE,
                         PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_ID,
                         PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_NAME,
                         ModelConstants.DNET_PROVENANCE_ACTIONS)));
                 return newContext;
             }
             return null;
@@ -6,6 +6,8 @@ import java.util.*;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
 
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.dump.oaf.*;
@@ -23,8 +25,6 @@ import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Context;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
 import eu.dnetlib.dhp.schema.oaf.*;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
 
 public class ResultMapper implements Serializable {
 
@@ -278,16 +278,17 @@ public class ResultMapper implements Serializable {
 
         }
 
 
         Optional
             .ofNullable(input.getPid())
             .ifPresent(
-                value -> out.setPid(value
-                    .stream()
-                    .map(
-                        p ->
-                            ControlledField
-                                .newInstance(p.getQualifier().getClassid(), p.getValue())).collect(Collectors.toList())));
+                value -> out
+                    .setPid(
+                        value
+                            .stream()
+                            .map(
+                                p -> ControlledField
+                                    .newInstance(p.getQualifier().getClassid(), p.getValue()))
+                            .collect(Collectors.toList())));
 
         oStr = Optional.ofNullable(input.getDateofacceptance());
         if (oStr.isPresent()) {
@@ -298,11 +299,10 @@ public class ResultMapper implements Serializable {
             out.setPublisher(oStr.get().getValue());
         }
 
 
         Optional
             .ofNullable(input.getSource())
-            .ifPresent(value -> out.setSource(value.stream().map(s -> s.getValue()).collect(Collectors.toList()) ));
+            .ifPresent(value -> out.setSource(value.stream().map(s -> s.getValue()).collect(Collectors.toList())));
         // value.stream().forEach(s -> sourceList.add(s.getValue())));
         // out.setSource(input.getSource().stream().map(s -> s.getValue()).collect(Collectors.toList()));
         List<Subject> subjectList = new ArrayList<>();
         Optional
@@ -577,48 +577,60 @@ public class ResultMapper implements Serializable {
         Optional<DataInfo> di = Optional.ofNullable(pid.getDataInfo());
         if (di.isPresent()) {
             return Pid
                 .newInstance(
                     ControlledField
                         .newInstance(
                             pid.getQualifier().getClassid(),
                             pid.getValue()),
                     Provenance
                         .newInstance(
                             di.get().getProvenanceaction().getClassname(),
                             di.get().getTrust()));
         } else {
             return Pid
                 .newInstance(
                     ControlledField
                         .newInstance(
                             pid.getQualifier().getClassid(),
                             pid.getValue())
 
                 );
         }
     }
 
     private static Pid getOrcid(List<StructuredProperty> p) {
         List<StructuredProperty> pid_list = p.stream().map(pid -> {
             if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID) ||
-                (pid.getQualifier().getClassid().equals(ModelConstants.ORCID_PENDING))){
+                (pid.getQualifier().getClassid().equals(ModelConstants.ORCID_PENDING))) {
                 return pid;
             }
             return null;
         }).filter(pid -> pid != null).collect(Collectors.toList());
 
-        if(pid_list.size() == 1){
+        if (pid_list.size() == 1) {
             return getAuthorPid(pid_list.get(0));
         }
 
-        List<StructuredProperty> orcid = pid_list.stream().filter(ap -> ap.getQualifier().getClassid()
-            .equals(ModelConstants.ORCID)).collect(Collectors.toList());
-        if(orcid.size() == 1){
+        List<StructuredProperty> orcid = pid_list
+            .stream()
+            .filter(
+                ap -> ap
+                    .getQualifier()
+                    .getClassid()
+                    .equals(ModelConstants.ORCID))
+            .collect(Collectors.toList());
+        if (orcid.size() == 1) {
             return getAuthorPid(orcid.get(0));
         }
-        orcid = pid_list.stream().filter(ap -> ap.getQualifier().getClassid()
-            .equals(ModelConstants.ORCID_PENDING)).collect(Collectors.toList());
-        if(orcid.size() == 1){
+        orcid = pid_list
+            .stream()
+            .filter(
+                ap -> ap
+                    .getQualifier()
+                    .getClassid()
+                    .equals(ModelConstants.ORCID_PENDING))
+            .collect(Collectors.toList());
+        if (orcid.size() == 1) {
             return getAuthorPid(orcid.get(0));
         }
 
@@ -86,10 +86,10 @@ public class SaveCommunityMap implements Serializable {
 
     private void saveCommunityMap(boolean singleCommunity, String community_id)
         throws ISLookUpException, IOException, DocumentException, SAXException {
         writer
             .write(
                 Utils.OBJECT_MAPPER
                     .writeValueAsString(queryInformationSystem.getCommunityMap(singleCommunity, community_id)));
     }
 
 }
@@ -8,8 +8,6 @@ import java.io.StringReader;
 import java.util.*;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Validated;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -28,9 +26,11 @@ import org.xml.sax.SAXException;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Validated;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Field;
 import eu.dnetlib.dhp.schema.oaf.Relation;
@@ -80,7 +80,9 @@ public class SparkPrepareResultProject implements Serializable {
     private static void prepareResultProjectList(SparkSession spark, String inputPath, String outputPath) {
         Dataset<Relation> relation = Utils
             .readPath(spark, inputPath + "/relation", Relation.class)
-            .filter("dataInfo.deletedbyinference = false and lower(relClass) = '" + ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
+            .filter(
+                "dataInfo.deletedbyinference = false and lower(relClass) = '"
+                    + ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
         Dataset<eu.dnetlib.dhp.schema.oaf.Project> projects = Utils
             .readPath(spark, inputPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);
 
@@ -159,7 +161,7 @@ public class SparkPrepareResultProject implements Serializable {
                     provenance.setTrust(di.get().getTrust());
                     p.setProvenance(provenance);
                 }
-                if (relation.getValidated()){
+                if (relation.getValidated()) {
                     p.setValidated(Validated.newInstance(relation.getValidated(), relation.getValidationDate()));
                 }
                 return p;
@@ -9,7 +9,6 @@ import java.io.StringReader;
 import java.util.*;
 import java.util.stream.Collectors;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.ForeachFunction;
@@ -23,6 +22,8 @@ import org.dom4j.DocumentException;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
@@ -453,18 +454,20 @@ public class DumpGraphEntities implements Serializable {
 
     private static <E extends OafEntity> void organizationMap(SparkSession spark, String inputPath, String outputPath,
         Class<E> inputClazz) {
-        Utils.readPath(spark, inputPath, inputClazz)
-            .map(
-                (MapFunction<E, Organization>) o -> mapOrganization((eu.dnetlib.dhp.schema.oaf.Organization) o),
-                Encoders.bean(Organization.class))
-            .filter((FilterFunction<Organization>) o -> o!= null)
-            .write()
-            .mode(SaveMode.Overwrite)
-            .option("compression", "gzip")
-            .json(outputPath);
+        Utils
+            .readPath(spark, inputPath, inputClazz)
+            .map(
+                (MapFunction<E, Organization>) o -> mapOrganization((eu.dnetlib.dhp.schema.oaf.Organization) o),
+                Encoders.bean(Organization.class))
+            .filter((FilterFunction<Organization>) o -> o != null)
+            .write()
+            .mode(SaveMode.Overwrite)
+            .option("compression", "gzip")
+            .json(outputPath);
     }
 
-    private static eu.dnetlib.dhp.schema.dump.oaf.graph.Organization mapOrganization(eu.dnetlib.dhp.schema.oaf.Organization org) {
+    private static eu.dnetlib.dhp.schema.dump.oaf.graph.Organization mapOrganization(
+        eu.dnetlib.dhp.schema.oaf.Organization org) {
         if (org.getDataInfo().getDeletedbyinference())
             return null;
         Organization organization = new Organization();
 
@@ -5,8 +5,6 @@ import java.io.StringReader;
 import java.util.*;
 import java.util.function.Consumer;
 
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
 import org.dom4j.Document;
 import org.dom4j.DocumentException;
 import org.dom4j.Element;
@@ -15,6 +13,8 @@ import org.dom4j.io.SAXReader;
 import org.jetbrains.annotations.NotNull;
 import org.xml.sax.SAXException;
 
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.utils.DHPUtils;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -140,7 +140,7 @@ public class QueryInformationSystem {
     }
 
     private String makeOpenaireId(Node el, String prefix) {
-        if (!prefix.equals(ModelSupport.entityIdPrefix.get("project"))){
+        if (!prefix.equals(ModelSupport.entityIdPrefix.get("project"))) {
             return null;
         }
         String funder = null;
@@ -107,7 +107,7 @@ public class SparkDumpRelationJob implements Serializable {
                 }
             }
         }
-        if(relation.getValidated()){
+        if (relation.getValidated()) {
             rel_new.setValidated(relation.getValidated());
             rel_new.setValidationDate(relation.getValidationDate());
         }
@@ -41,9 +41,9 @@ public class SparkDumpFunderResults implements Serializable {
         parser.parseArgument(args);
 
         Boolean isSparkSessionManaged = Optional
             .ofNullable(parser.get("isSparkSessionManaged"))
             .map(Boolean::valueOf)
             .orElse(Boolean.TRUE);
         log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
         final String inputPath = parser.get("sourcePath");
@@ -58,32 +58,31 @@ public class SparkDumpFunderResults implements Serializable {
         SparkConf conf = new SparkConf();
 
         runWithSparkSession(
             conf,
             isSparkSessionManaged,
             spark -> {
                 Utils.removeOutputDir(spark, outputPath);
                 writeResultProjectList(spark, inputPath, outputPath, graphPath);
             });
     }
 
     private static void writeResultProjectList(SparkSession spark, String inputPath, String outputPath,
         String graphPath) {
 
         Dataset<eu.dnetlib.dhp.schema.oaf.Project> project = Utils
             .readPath(spark, graphPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);
 
         Dataset<CommunityResult> result = Utils
             .readPath(spark, inputPath + "/publication", CommunityResult.class)
             .union(Utils.readPath(spark, inputPath + "/dataset", CommunityResult.class))
             .union(Utils.readPath(spark, inputPath + "/orp", CommunityResult.class))
             .union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
 
 
         List<String> funderList = project
             .select("id")
             .map((MapFunction<Row, String>) value -> value.getString(0).substring(0, 15), Encoders.STRING())
             .distinct()
             .collectAsList();
 
         funderList.forEach(funder -> {
             String fundernsp = funder.substring(3);
@@ -104,7 +103,7 @@ public class SparkDumpFunderResults implements Serializable {
     }
 
     private static void dumpResults(String nsp, Dataset<CommunityResult> results, String outputPath,
         String funderName) {
 
         results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
             if (!Optional.ofNullable(r.getProjects()).isPresent()) {
@@ -123,15 +122,15 @@ public class SparkDumpFunderResults implements Serializable {
             }
             return null;
         }, Encoders.bean(CommunityResult.class))
             .filter(Objects::nonNull)
             .write()
             .mode(SaveMode.Overwrite)
             .option("compression", "gzip")
             .json(outputPath + "/" + funderName);
     }
 
     private static void writeFunderResult(String funder, Dataset<CommunityResult> results, String outputPath,
         String funderDump) {
 
         if (funder.startsWith("40|irb")) {
             dumpResults(funder, results, outputPath, "HRZZ");
@@ -80,7 +80,6 @@ public class SparkResultLinkedToProject implements Serializable {
     private static <R extends Result> void writeResultsLinkedToProjects(SparkSession spark, Class<R> inputClazz,
         String inputPath, String outputPath, String graphPath) {
 
 
         Dataset<R> results = Utils
             .readPath(spark, inputPath, inputClazz)
             .filter("dataInfo.deletedbyinference = false and datainfo.invisible = false");
@@ -7,17 +7,10 @@ import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.sun.xml.internal.ws.policy.AssertionSet;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.dump.oaf.Instance;
-import eu.dnetlib.dhp.schema.dump.oaf.OpenAccessRoute;
 import org.apache.commons.io.FileUtils;
-import org.apache.neethi.Assertion;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -25,10 +18,14 @@ import org.junit.jupiter.api.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
 
 import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.dump.oaf.Instance;
+import eu.dnetlib.dhp.schema.dump.oaf.OpenAccessRoute;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
@@ -145,70 +142,121 @@ public class DumpJobTest {
     }
 
     @Test
-    public void testPublicationDump(){
+    public void testPublicationDump() {
         final String sourcePath = getClass()
             .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/publication_extendedinstance")
             .getPath();
 
         final String communityMapPath = getClass()
             .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
             .getPath();
 
         DumpProducts dump = new DumpProducts();
         dump
             .run(
                 // false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
                 false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
                 GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
 
         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
         JavaRDD<GraphResult> tmp = sc
             .textFile(workingDir.toString() + "/result")
             .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));
 
         org.apache.spark.sql.Dataset<GraphResult> verificationDataset = spark
             .createDataset(tmp.rdd(), Encoders.bean(GraphResult.class));
 
         Assertions.assertEquals(1, verificationDataset.count());
 
         GraphResult gr = verificationDataset.first();
 
 
         Assertions.assertEquals(2, gr.getMeasures().size());
-        Assertions.assertTrue(gr.getMeasures().stream().anyMatch(m -> m.getKey().equals("influence")
-            && m.getValue().equals("1.62759106106e-08")));
-        Assertions.assertTrue(gr.getMeasures().stream().anyMatch(m -> m.getKey().equals("popularity")
-            && m.getValue().equals("0.22519296")));
+        Assertions
+            .assertTrue(
+                gr
+                    .getMeasures()
+                    .stream()
+                    .anyMatch(
+                        m -> m.getKey().equals("influence")
+                            && m.getValue().equals("1.62759106106e-08")));
+        Assertions
+            .assertTrue(
+                gr
+                    .getMeasures()
+                    .stream()
+                    .anyMatch(
+                        m -> m.getKey().equals("popularity")
+                            && m.getValue().equals("0.22519296")));
 
         Assertions.assertEquals(6, gr.getAuthor().size());
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Nikolaidou,Charitini") &&
-            a.getName().equals("Charitini") && a.getSurname().equals("Nikolaidou")
-            && a.getRank() == 1 && a.getPid() == null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Nikolaidou,Charitini") &&
+                            a.getName().equals("Charitini") && a.getSurname().equals("Nikolaidou")
+                            && a.getRank() == 1 && a.getPid() == null));
 
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Votsi,Nefta") &&
-            a.getName().equals("Nefta") && a.getSurname().equals("Votsi")
-            && a.getRank() == 2 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
-            && a.getPid().getId().getValue().equals("0000-0001-6651-1178") && a.getPid().getProvenance() != null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Votsi,Nefta") &&
+                            a.getName().equals("Nefta") && a.getSurname().equals("Votsi")
+                            && a.getRank() == 2 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
+                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
+                            && a.getPid().getProvenance() != null));
 
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Sgardelis,Steanos") &&
-            a.getName().equals("Steanos") && a.getSurname().equals("Sgardelis")
-            && a.getRank() == 3 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
-            && a.getPid().getId().getValue().equals("0000-0001-6651-1178") && a.getPid().getProvenance() != null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Sgardelis,Steanos") &&
+                            a.getName().equals("Steanos") && a.getSurname().equals("Sgardelis")
+                            && a.getRank() == 3 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
+                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
+                            && a.getPid().getProvenance() != null));
 
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Halley,John") &&
-            a.getName().equals("John") && a.getSurname().equals("Halley")
-            && a.getRank() == 4 && a.getPid() == null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Halley,John") &&
+                            a.getName().equals("John") && a.getSurname().equals("Halley")
+                            && a.getRank() == 4 && a.getPid() == null));
 
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Pantis,John") &&
-            a.getName().equals("John") && a.getSurname().equals("Pantis")
-            && a.getRank() == 5 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
-            && a.getPid().getId().getValue().equals("0000-0001-6651-1178") && a.getPid().getProvenance() != null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Pantis,John") &&
+                            a.getName().equals("John") && a.getSurname().equals("Pantis")
+                            && a.getRank() == 5 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID)
+                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
+                            && a.getPid().getProvenance() != null));
 
-        Assertions.assertTrue(gr.getAuthor().stream().anyMatch(a -> a.getFullname().equals("Tsiafouli,Maria") &&
-            a.getName().equals("Maria") && a.getSurname().equals("Tsiafouli")
-            && a.getRank() == 6 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
-            && a.getPid().getId().getValue().equals("0000-0001-6651-1178") && a.getPid().getProvenance() != null));
+        Assertions
+            .assertTrue(
+                gr
+                    .getAuthor()
+                    .stream()
+                    .anyMatch(
+                        a -> a.getFullname().equals("Tsiafouli,Maria") &&
+                            a.getName().equals("Maria") && a.getSurname().equals("Tsiafouli")
+                            && a.getRank() == 6 && a.getPid().getId().getScheme().equals(ModelConstants.ORCID_PENDING)
+                            && a.getPid().getId().getValue().equals("0000-0001-6651-1178")
+                            && a.getPid().getProvenance() != null));
 
         Assertions.assertEquals("publication", gr.getType());
 
@@ -216,27 +264,52 @@ public class DumpJobTest {
         Assertions.assertEquals("English", gr.getLanguage().getLabel());
 
         Assertions.assertEquals(1, gr.getCountry().size());
-        Assertions.assertEquals("IT" , gr.getCountry().get(0).getCode());
-        Assertions.assertEquals("Italy" , gr.getCountry().get(0).getLabel());
-        Assertions.assertTrue( gr.getCountry().get(0).getProvenance() == null);
+        Assertions.assertEquals("IT", gr.getCountry().get(0).getCode());
+        Assertions.assertEquals("Italy", gr.getCountry().get(0).getLabel());
+        Assertions.assertTrue(gr.getCountry().get(0).getProvenance() == null);
 
         Assertions.assertEquals(12, gr.getSubjects().size());
-        Assertions.assertTrue(gr.getSubjects().stream().anyMatch(s -> s.getSubject().getValue().equals("Ecosystem Services hotspots")
-            && s.getSubject().getScheme().equals("ACM") && s.getProvenance() != null &&
-            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));
-        Assertions.assertTrue(gr.getSubjects().stream().anyMatch(s -> s.getSubject().getValue().equals("Natura 2000")
-            && s.getSubject().getScheme().equals("") && s.getProvenance() != null &&
-            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));
+        Assertions
+            .assertTrue(
+                gr
+                    .getSubjects()
+                    .stream()
+                    .anyMatch(
+                        s -> s.getSubject().getValue().equals("Ecosystem Services hotspots")
+                            && s.getSubject().getScheme().equals("ACM") && s.getProvenance() != null &&
+                            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));
+        Assertions
+            .assertTrue(
+                gr
+                    .getSubjects()
+                    .stream()
+                    .anyMatch(
+                        s -> s.getSubject().getValue().equals("Natura 2000")
+                            && s.getSubject().getScheme().equals("") && s.getProvenance() != null &&
+                            s.getProvenance().getProvenance().equals("sysimport:crosswalk:repository")));
 
-        Assertions.assertEquals("Ecosystem Service capacity is higher in areas of multiple designation types",
+        Assertions
+            .assertEquals(
+                "Ecosystem Service capacity is higher in areas of multiple designation types",
             gr.getMaintitle());
 
         Assertions.assertEquals(null, gr.getSubtitle());
 
         Assertions.assertEquals(1, gr.getDescription().size());
 
-        Assertions.assertTrue(gr.getDescription().get(0).startsWith("The implementation of the Ecosystem Service (ES) concept into practice"));
-        Assertions.assertTrue(gr.getDescription().get(0).endsWith("start complying with new standards and demands for nature conservation and environmental management."));
+        Assertions
+            .assertTrue(
+                gr
+                    .getDescription()
+                    .get(0)
+                    .startsWith("The implementation of the Ecosystem Service (ES) concept into practice"));
+        Assertions
+            .assertTrue(
+                gr
+                    .getDescription()
+                    .get(0)
+                    .endsWith(
+                        "start complying with new standards and demands for nature conservation and environmental management."));
 
         Assertions.assertEquals("2017-01-01", gr.getPublicationdate());
 
@@ -255,7 +328,9 @@ public class DumpJobTest {
         Assertions.assertEquals(0, gr.getCoverage().size());
 
         Assertions.assertEquals(ModelConstants.ACCESS_RIGHT_OPEN, gr.getBestaccessright().getLabel());
-        Assertions.assertEquals(Constants.accessRightsCoarMap.get(ModelConstants.ACCESS_RIGHT_OPEN), gr.getBestaccessright().getCode());
+        Assertions
+            .assertEquals(
+                Constants.accessRightsCoarMap.get(ModelConstants.ACCESS_RIGHT_OPEN), gr.getBestaccessright().getCode());
         Assertions.assertEquals(null, gr.getBestaccessright().getOpenAccessRoute());
 
         Assertions.assertEquals("One Ecosystem", gr.getContainer().getName());
@@ -284,12 +359,16 @@ public class DumpJobTest {
         Assertions.assertEquals("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2", gr.getId());
 
         Assertions.assertEquals(2, gr.getOriginalId().size());
-        Assertions.assertTrue(gr.getOriginalId().contains("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2")
-            && gr.getOriginalId().contains("10.3897/oneeco.2.e13718"));
+        Assertions
+            .assertTrue(
+                gr.getOriginalId().contains("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2")
+                    && gr.getOriginalId().contains("10.3897/oneeco.2.e13718"));
 
         Assertions.assertEquals(1, gr.getPid().size());
-        Assertions.assertTrue(gr.getPid().get(0).getScheme().equals("doi")
-            && gr.getPid().get(0).getValue().equals("10.1016/j.triboint.2014.05.004"));
+        Assertions
+            .assertTrue(
+                gr.getPid().get(0).getScheme().equals("doi")
+                    && gr.getPid().get(0).getValue().equals("10.1016/j.triboint.2014.05.004"));
 
         Assertions.assertEquals("2020-03-23T00:20:51.392Z", gr.getDateofcollection());
 
@@ -298,53 +377,63 @@ public class DumpJobTest {
 Instance instance = gr.getInstance().get(0);
 Assertions.assertEquals(0, instance.getPid().size());
 Assertions.assertEquals(1, instance.getAlternateIdentifier().size());
-Assertions.assertTrue(instance.getAlternateIdentifier().get(0).getScheme().equals("doi")
-&& instance.getAlternateIdentifier().get(0).getValue().equals("10.3897/oneeco.2.e13718"));
+Assertions
+.assertTrue(
+instance.getAlternateIdentifier().get(0).getScheme().equals("doi")
+&& instance.getAlternateIdentifier().get(0).getValue().equals("10.3897/oneeco.2.e13718"));
 Assertions.assertEquals(null, instance.getLicense());
-Assertions.assertTrue(instance.getAccessright().getCode().equals(Constants.accessRightsCoarMap
-.get(ModelConstants.ACCESS_RIGHT_OPEN)));
+Assertions
+.assertTrue(
+instance
+.getAccessright()
+.getCode()
+.equals(
+Constants.accessRightsCoarMap
+.get(ModelConstants.ACCESS_RIGHT_OPEN)));
 Assertions.assertTrue(instance.getAccessright().getLabel().equals(ModelConstants.ACCESS_RIGHT_OPEN));
 Assertions.assertTrue(instance.getAccessright().getOpenAccessRoute().equals(OpenAccessRoute.green));
 Assertions.assertTrue(instance.getType().equals("Article"));
 Assertions.assertEquals(2, instance.getUrl().size());
-Assertions.assertTrue(instance.getUrl().contains("https://doi.org/10.3897/oneeco.2.e13718")
-&& instance.getUrl().contains("https://oneecosystem.pensoft.net/article/13718/"));
-Assertions.assertEquals("2017-01-01",instance.getPublicationdate());
-Assertions.assertEquals(null,instance.getArticleprocessingcharge());
+Assertions
+.assertTrue(
+instance.getUrl().contains("https://doi.org/10.3897/oneeco.2.e13718")
+&& instance.getUrl().contains("https://oneecosystem.pensoft.net/article/13718/"));
+Assertions.assertEquals("2017-01-01", instance.getPublicationdate());
+Assertions.assertEquals(null, instance.getArticleprocessingcharge());
 Assertions.assertEquals("peerReviewed", instance.getRefereed());
 }


 @Test
-public void testDatasetDump(){
+public void testDatasetDump() {
 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/dataset_extendedinstance")
 .getPath();

 final String communityMapPath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
 .getPath();

 DumpProducts dump = new DumpProducts();
 dump
-.run(false, sourcePath, workingDir.toString() + "/result",
-communityMapPath, Dataset.class,
-GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
+.run(
+false, sourcePath, workingDir.toString() + "/result",
+communityMapPath, Dataset.class,
+GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 JavaRDD<GraphResult> tmp = sc
 .textFile(workingDir.toString() + "/result")
 .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));

 org.apache.spark.sql.Dataset<GraphResult> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(GraphResult.class));

 Assertions.assertEquals(1, verificationDataset.count());

 Assertions.assertEquals(1, verificationDataset.filter("type = 'dataset'").count());

-//the common fields in the result have been already checked. Now checking only
+// the common fields in the result have been already checked. Now checking only
 // community specific fields

 GraphResult gr = verificationDataset.first();
@@ -353,10 +442,33 @@ public class DumpJobTest {
 Assertions.assertEquals(2, gr.getGeolocation().stream().filter(gl -> gl.getBox().equals("")).count());
 Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPlace().equals("")).count());
 Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("")).count());
-Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPlace().equals("18 York St, Ottawa, ON K1N 5S6; Ottawa; Ontario; Canada")).count());
-Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("45.427242 -75.693904")).count());
-Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("") && !gl.getPlace().equals("")).count());
-Assertions.assertEquals(1, gr.getGeolocation().stream().filter(gl -> !gl.getPoint().equals("") && gl.getPlace().equals("")).count());
+Assertions
+.assertEquals(
+1,
+gr
+.getGeolocation()
+.stream()
+.filter(gl -> gl.getPlace().equals("18 York St, Ottawa, ON K1N 5S6; Ottawa; Ontario; Canada"))
+.count());
+Assertions
+.assertEquals(
+1, gr.getGeolocation().stream().filter(gl -> gl.getPoint().equals("45.427242 -75.693904")).count());
+Assertions
+.assertEquals(
+1,
+gr
+.getGeolocation()
+.stream()
+.filter(gl -> gl.getPoint().equals("") && !gl.getPlace().equals(""))
+.count());
+Assertions
+.assertEquals(
+1,
+gr
+.getGeolocation()
+.stream()
+.filter(gl -> !gl.getPoint().equals("") && gl.getPlace().equals(""))
+.count());

 Assertions.assertEquals("1024Gb", gr.getSize());

@@ -373,30 +485,30 @@ public class DumpJobTest {
 }

 @Test
-public void testSoftwareDump(){
+public void testSoftwareDump() {
 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/software_extendedinstance")
 .getPath();

 final String communityMapPath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
 .getPath();

 DumpProducts dump = new DumpProducts();
 dump
-.run(false, sourcePath, workingDir.toString() + "/result",
-communityMapPath, Software.class,
-GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
+.run(
+false, sourcePath, workingDir.toString() + "/result",
+communityMapPath, Software.class,
+GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());


 JavaRDD<GraphResult> tmp = sc
 .textFile(workingDir.toString() + "/result")
 .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));

 org.apache.spark.sql.Dataset<GraphResult> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(GraphResult.class));

 Assertions.assertEquals(1, verificationDataset.count());

@@ -412,7 +524,6 @@ public class DumpJobTest {

 Assertions.assertEquals("perl", gr.getProgrammingLanguage());

-
 Assertions.assertEquals(null, gr.getContainer());
 Assertions.assertEquals(null, gr.getContactperson());
 Assertions.assertEquals(null, gr.getContactgroup());
@@ -424,30 +535,30 @@ public class DumpJobTest {
 }

 @Test
-public void testOrpDump(){
+public void testOrpDump() {
 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/orp_extendedinstance")
 .getPath();

 final String communityMapPath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
 .getPath();

 DumpProducts dump = new DumpProducts();
 dump
-.run(false, sourcePath, workingDir.toString() + "/result",
-communityMapPath, OtherResearchProduct.class,
-GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
+.run(
+false, sourcePath, workingDir.toString() + "/result",
+communityMapPath, OtherResearchProduct.class,
+GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());


 JavaRDD<GraphResult> tmp = sc
 .textFile(workingDir.toString() + "/result")
 .map(item -> OBJECT_MAPPER.readValue(item, GraphResult.class));

 org.apache.spark.sql.Dataset<GraphResult> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(GraphResult.class));

 Assertions.assertEquals(1, verificationDataset.count());

@@ -466,7 +577,6 @@ public class DumpJobTest {
 Assertions.assertTrue(gr.getTool().contains("tool1"));
 Assertions.assertTrue(gr.getTool().contains("tool2"));

-
 Assertions.assertEquals(null, gr.getContainer());
 Assertions.assertEquals(null, gr.getDocumentationUrl());
 Assertions.assertEquals(null, gr.getCodeRepositoryUrl());
@@ -481,32 +591,33 @@ public class DumpJobTest {
 public void testPublicationDumpCommunity() throws JsonProcessingException {

 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/publication_extendedinstance")
 .getPath();

 final String communityMapPath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
 .getPath();

 DumpProducts dump = new DumpProducts();
 dump
-.run(false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
-CommunityResult.class, Constants.DUMPTYPE.COMMUNITY.getType());
+.run(
+false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
+CommunityResult.class, Constants.DUMPTYPE.COMMUNITY.getType());

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 JavaRDD<CommunityResult> tmp = sc
 .textFile(workingDir.toString() + "/result")
 .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

 org.apache.spark.sql.Dataset<CommunityResult> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

 Assertions.assertEquals(1, verificationDataset.count());

 Assertions.assertEquals(1, verificationDataset.filter("type = 'publication'").count());

-//the common fields in the result have been already checked. Now checking only
+// the common fields in the result have been already checked. Now checking only
 // community specific fields

 CommunityResult cr = verificationDataset.first();
@@ -519,15 +630,20 @@ public class DumpJobTest {
 Assertions.assertEquals("0.9", cr.getContext().get(0).getProvenance().get(0).getTrust());

 Assertions.assertEquals(1, cr.getCollectedfrom().size());
-Assertions.assertEquals("10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getCollectedfrom().get(0).getKey());
+Assertions
+.assertEquals("10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getCollectedfrom().get(0).getKey());
 Assertions.assertEquals("Pensoft", cr.getCollectedfrom().get(0).getValue());

 Assertions.assertEquals(1, cr.getInstance().size());
-Assertions.assertEquals("10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getInstance().get(0).getCollectedfrom().getKey());
+Assertions
+.assertEquals(
+"10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db",
+cr.getInstance().get(0).getCollectedfrom().getKey());
 Assertions.assertEquals("Pensoft", cr.getInstance().get(0).getCollectedfrom().getValue());
-Assertions.assertEquals("10|openaire____::e707e544b9a5bd23fc27fbfa65eb60dd", cr.getInstance().get(0).getHostedby().getKey());
-Assertions.assertEquals("One Ecosystem",cr.getInstance().get(0).getHostedby().getValue());
+Assertions
+.assertEquals(
+"10|openaire____::e707e544b9a5bd23fc27fbfa65eb60dd", cr.getInstance().get(0).getHostedby().getKey());
+Assertions.assertEquals("One Ecosystem", cr.getInstance().get(0).getHostedby().getValue());

 }

@@ -587,8 +703,6 @@ public class DumpJobTest {

 Assertions.assertTrue(verificationDataset.filter("type = 'dataset'").count() == 90);

-
-
 }

 @Test
@@ -650,7 +764,6 @@ public class DumpJobTest {

 Assertions.assertEquals(0, verificationDataset.count());

-
 }

 @Test
@@ -718,7 +831,6 @@ public class DumpJobTest {

 Assertions.assertEquals(6, verificationDataset.filter("type = 'software'").count());

-
 }

 @Test
@@ -814,7 +926,6 @@ public class DumpJobTest {

 Assertions.assertEquals(23, verificationDataset.count());

-
 Assertions.assertEquals(23, verificationDataset.filter("type = 'publication'").count());

 verificationDataset.createOrReplaceTempView("check");
@@ -832,7 +943,6 @@ public class DumpJobTest {

 Assertions.assertTrue(temp.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);

-
 }

 }
@@ -8,16 +8,17 @@ import java.nio.file.Files;
 import java.nio.file.Path;

 import org.apache.commons.io.FileUtils;
-import org.apache.neethi.Assertion;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterAll;

 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
@@ -229,107 +230,98 @@ public class PrepareResultProjectJobTest {
 public void testMatchValidated() throws Exception {

 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultProject/match_validatedRels")
 .getPath();

 SparkPrepareResultProject.main(new String[] {
 "-isSparkSessionManaged", Boolean.FALSE.toString(),
 "-outputPath", workingDir.toString() + "/preparedInfo",
 "-sourcePath", sourcePath
 });

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 JavaRDD<ResultProject> tmp = sc
 .textFile(workingDir.toString() + "/preparedInfo")
 .map(item -> OBJECT_MAPPER.readValue(item, ResultProject.class));

 org.apache.spark.sql.Dataset<ResultProject> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(ResultProject.class));

-Assertions.assertTrue(verificationDataset.count() == 2);
+assertEquals(2, verificationDataset.count() );

-Assertions
-.assertEquals(
-1,
-verificationDataset.filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'").count());
-Assertions
-.assertEquals(
-1,
-verificationDataset.filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'").count());
+assertEquals(
+1,
+verificationDataset.filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'").count());
+assertEquals(
+1,
+verificationDataset.filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'").count());

 verificationDataset.createOrReplaceTempView("dataset");

-String query = "select resultId, MyT.id project , MyT.title title, MyT.acronym acronym , MyT.provenance.provenance provenance, " +
-"MyT.validated.validatedByFunder, MyT.validated.validationDate "
-+ "from dataset "
-+ "lateral view explode(projectsList) p as MyT ";
+String query = "select resultId, MyT.id project , MyT.title title, MyT.acronym acronym , MyT.provenance.provenance provenance, "
++
+"MyT.validated.validatedByFunder, MyT.validated.validationDate "
++ "from dataset "
++ "lateral view explode(projectsList) p as MyT ";

 org.apache.spark.sql.Dataset<Row> resultExplodedProvenance = spark.sql(query);
-Assertions.assertEquals(3, resultExplodedProvenance.count());
-Assertions.assertEquals(3, resultExplodedProvenance.filter("validatedByFunder = true").count());
-Assertions
-.assertEquals(
-2,
-resultExplodedProvenance
-.filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'")
-.count());
+assertEquals(3, resultExplodedProvenance.count());
+assertEquals(3, resultExplodedProvenance.filter("validatedByFunder = true").count());
+assertEquals(
+2,
+resultExplodedProvenance
+.filter("resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'")
+.count());

-Assertions
-.assertEquals(
-1,
-resultExplodedProvenance
-.filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'")
-.count());
+assertEquals(
+1,
+resultExplodedProvenance
+.filter("resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'")
+.count());

-Assertions
-.assertEquals(
-2,
-resultExplodedProvenance
-.filter("project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6'")
-.count());
+assertEquals(
+2,
+resultExplodedProvenance
+.filter("project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6'")
+.count());

-Assertions
-.assertEquals(
-1,
-resultExplodedProvenance
-.filter(
-"project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6' " +
-"and resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb' " +
-"and validatedByFunder = true " +
-"and validationDate = '2021-08-06'")
-.count());
+assertEquals(
+1,
+resultExplodedProvenance
+.filter(
+"project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6' " +
+"and resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb' " +
+"and validatedByFunder = true " +
+"and validationDate = '2021-08-06'")
+.count());

-Assertions
-.assertEquals(
-1,
-resultExplodedProvenance
-.filter(
-"project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6' " +
-"and resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80' " +
-"and validatedByFunder = true and validationDate = '2021-08-04'")
-.count());
+assertEquals(
+1,
+resultExplodedProvenance
+.filter(
+"project = '40|aka_________::0f7d119de1f656b5763a16acf876fed6' " +
+"and resultId = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80' " +
+"and validatedByFunder = true and validationDate = '2021-08-04'")
+.count());

-Assertions
-.assertEquals(
-1,
-resultExplodedProvenance
-.filter("project = '40|aka_________::03376222b28a3aebf2730ac514818d04'")
-.count());
+assertEquals(
+1,
+resultExplodedProvenance
+.filter("project = '40|aka_________::03376222b28a3aebf2730ac514818d04'")
+.count());

-Assertions
-.assertEquals(
-1,
-resultExplodedProvenance
-.filter(
-"project = '40|aka_________::03376222b28a3aebf2730ac514818d04' " +
-"and resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb' " +
-"and validatedByFunder = true and validationDate = '2021-08-05'")
-.count());
+assertEquals(
+1,
+resultExplodedProvenance
+.filter(
+"project = '40|aka_________::03376222b28a3aebf2730ac514818d04' " +
+"and resultId = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb' " +
+"and validatedByFunder = true and validationDate = '2021-08-05'")
+.count());

-Assertions
-.assertEquals(
-3, resultExplodedProvenance.filter("provenance = 'sysimport:crosswalk:entityregistry'").count());
+assertEquals(
+3, resultExplodedProvenance.filter("provenance = 'sysimport:crosswalk:entityregistry'").count());

 }
 }
@@ -8,8 +8,6 @@ import java.util.HashMap;
 import java.util.logging.Filter;
 import java.util.stream.Collectors;

-import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -31,6 +29,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo;
 import eu.dnetlib.dhp.schema.dump.oaf.Result;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Project;

 public class UpdateProjectInfoTest {

@@ -142,26 +142,26 @@ public class UpdateProjectInfoTest {
 }

 @Test
-public void testValidatedRelation() throws Exception{
+public void testValidatedRelation() throws Exception {
 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/addProjectInfo")
 .getPath();

 SparkUpdateProjectInfo.main(new String[] {
 "-isSparkSessionManaged", Boolean.FALSE.toString(),
 "-preparedInfoPath", sourcePath + "/preparedInfoValidated",
 "-outputPath", workingDir.toString() + "/result",
 "-sourcePath", sourcePath + "/publication_extendedmodel"
 });

 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 JavaRDD<CommunityResult> tmp = sc
 .textFile(workingDir.toString() + "/result")
 .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

 org.apache.spark.sql.Dataset<CommunityResult> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

 verificationDataset.show(false);

@@ -169,10 +169,10 @@ public class UpdateProjectInfoTest {
 verificationDataset.createOrReplaceTempView("dataset");

 String query = "select id, MyT.code code, MyT.title title, MyT.funder.name funderName, MyT.funder.shortName funderShortName, "
 +
 "MyT.funder.jurisdiction funderJurisdiction, MyT.funder.fundingStream fundingStream, MyT.validated "
 + "from dataset " +
 "lateral view explode(projects) p as MyT ";

 org.apache.spark.sql.Dataset<Row> resultExplodedProvenance = spark.sql(query);

@@ -180,27 +180,34 @@ public class UpdateProjectInfoTest {
 resultExplodedProvenance.show(false);

 Assertions
 .assertEquals(
 2,
 resultExplodedProvenance.filter("id = '50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2'").count());

 Assertions
 .assertEquals(
 1,
 resultExplodedProvenance
 .filter("id = '50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2' and code = '123455'")
 .count());

 Assertions
 .assertEquals(
 1,
 resultExplodedProvenance
 .filter("id = '50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2' and code = '119027'")
 .count());

 Project project = verificationDataset
-.map((MapFunction<CommunityResult, Project>) cr -> cr.getProjects().stream().filter(p -> p.getValidated() != null).collect(Collectors.toList()).get(0)
-, Encoders.bean(Project.class)).first();
+.map(
+(MapFunction<CommunityResult, Project>) cr -> cr
+.getProjects()
+.stream()
+.filter(p -> p.getValidated() != null)
+.collect(Collectors.toList())
+.get(0),
+Encoders.bean(Project.class))
+.first();

 Assertions.assertTrue(project.getFunder().getName().equals("Academy of Finland"));
 Assertions.assertTrue(project.getFunder().getShortName().equals("AKA"));
@@ -208,18 +215,22 @@ public class UpdateProjectInfoTest {
 Assertions.assertTrue(project.getFunder().getFundingStream() == null);
 Assertions.assertTrue(project.getValidated().getValidationDate().equals("2021-08-06"));

-
 project = verificationDataset
-.map((MapFunction<CommunityResult, Project>) cr -> cr.getProjects().stream().filter(p -> p.getValidated() == null).collect(Collectors.toList()).get(0)
-, Encoders.bean(Project.class)).first();
+.map(
+(MapFunction<CommunityResult, Project>) cr -> cr
+.getProjects()
+.stream()
+.filter(p -> p.getValidated() == null)
+.collect(Collectors.toList())
+.get(0),
+Encoders.bean(Project.class))
+.first();

 Assertions.assertTrue(project.getFunder().getName().equals("European Commission"));
 Assertions.assertTrue(project.getFunder().getShortName().equals("EC"));
 Assertions.assertTrue(project.getFunder().getJurisdiction().equals("EU"));
 Assertions.assertTrue(project.getFunder().getFundingStream().equals("H2020"));

-
 }

 }
@@ -94,7 +94,8 @@ public class DumpRelationTest {

 verificationDataset.createOrReplaceTempView("table");

-verificationDataset.foreach((ForeachFunction<Relation>)r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+verificationDataset
+.foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

 Dataset<Row> check = spark
 .sql(
@@ -134,13 +135,13 @@ public class DumpRelationTest {
 public void test2() throws Exception {

 final String sourcePath = getClass()
 .getResource("/eu/dnetlib/dhp/oa/graph/dump/relation/relation_validated")
 .getPath();

 SparkDumpRelationJob.main(new String[] {
 "-isSparkSessionManaged", Boolean.FALSE.toString(),
 "-outputPath", workingDir.toString() + "/relation",
 "-sourcePath", sourcePath
 });

 // dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
@@ -148,57 +149,58 @@ public class DumpRelationTest {
 final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 JavaRDD<Relation> tmp = sc
 .textFile(workingDir.toString() + "/relation")
 .map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

 org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
 .createDataset(tmp.rdd(), Encoders.bean(Relation.class));

 verificationDataset.createOrReplaceTempView("table");

-verificationDataset.foreach((ForeachFunction<Relation>)r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+verificationDataset
+.foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

 Dataset<Row> check = spark
 .sql(
 "SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
 +
 "from table ");

 Assertions.assertEquals(20, check.filter("name = 'isProvidedBy'").count());
 Assertions
 .assertEquals(
 20, check
 .filter(
 "name = 'isProvidedBy' and stype = 'datasource' and ttype = 'organization' and " +
 "provenance = 'Harvested'")
 .count());

 Assertions.assertEquals(7, check.filter("name = 'isParticipant'").count());
 Assertions
 .assertEquals(
 7, check
 .filter(
 "name = 'isParticipant' and stype = 'organization' and ttype = 'project' " +
 "and provenance = 'Harvested'")
 .count());

 Assertions.assertEquals(1, check.filter("name = 'isAuthorInstitutionOf'").count());
 Assertions
 .assertEquals(
 1, check
 .filter(
 "name = 'isAuthorInstitutionOf' and stype = 'organization' and ttype = 'result' " +
 "and provenance = 'Inferred by OpenAIRE'")
 .count());

 Assertions.assertEquals(2, check.filter("name = 'isProducedBy'").count());
 Assertions
 .assertEquals(
 2, check
 .filter(
 "name = 'isProducedBy' and stype = 'project' and ttype = 'result' " +
 "and provenance = 'Harvested' and validated = true " +
 "and validationDate = '2021-08-06'")
 .count());
 }
 }
@@ -6,7 +6,6 @@ import static org.mockito.Mockito.lenient;
 import java.util.*;
 import java.util.function.Consumer;

-import eu.dnetlib.dhp.schema.common.ModelSupport;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -14,6 +13,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;

+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@@ -529,7 +529,8 @@ class QueryInformationSystemTest {
 List<ContextInfo> cInfoList = new ArrayList<>();
 final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
 queryInformationSystem.execContextRelationQuery();
-queryInformationSystem.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));
+queryInformationSystem
+.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));

 Assertions.assertEquals(5, cInfoList.size());
 }
@@ -540,7 +541,8 @@ class QueryInformationSystemTest {
 List<ContextInfo> cInfoList = new ArrayList<>();
 final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
 queryInformationSystem.execContextRelationQuery();
-queryInformationSystem.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));
+queryInformationSystem
+.getContextRelation(consumer, "contentproviders", ModelSupport.entityIdPrefix.get("datasource"));

 cInfoList.forEach(contextInfo -> {
 switch (contextInfo.getId()) {
@@ -145,8 +145,8 @@ public class SplitPerFunderTest {

 // CONICYT 0
 tmp = sc
 .textFile(workingDir.toString() + "/split/CONICYTF")
 .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 Assertions.assertEquals(0, tmp.count());

 }
@@ -84,7 +84,8 @@ public class IndexRecordTransformerTest {

 @Test
 public void testForEOSCFutureTraining() throws IOException, TransformerException {
-final String record = IOUtils.toString(getClass().getResourceAsStream("eosc-future/training-notebooks-seadatanet.xml"));
+final String record = IOUtils
+.toString(getClass().getResourceAsStream("eosc-future/training-notebooks-seadatanet.xml"));
 testRecordTransformation(record);
 }

pom.xml
@@ -753,7 +753,7 @@
 <mockito-core.version>3.3.3</mockito-core.version>
 <mongodb.driver.version>3.4.2</mongodb.driver.version>
 <vtd.version>[2.12,3.0)</vtd.version>
-<dhp-schemas.version>[2.7.18]</dhp-schemas.version>
+<dhp-schemas.version>[2.7.19]</dhp-schemas.version>
 <dnet-actionmanager-api.version>[4.0.3]</dnet-actionmanager-api.version>
 <dnet-actionmanager-common.version>[6.0.5]</dnet-actionmanager-common.version>
 <dnet-openaire-broker-common.version>[3.1.6]</dnet-openaire-broker-common.version>