orcid-no-doi #43
@@ -15,12 +15,12 @@
     <snapshotRepository>
         <id>dnet45-snapshots</id>
         <name>DNet45 Snapshots</name>
-        <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-snapshots</url>
+        <url>https://maven.d4science.org/nexus/content/repositories/dnet45-snapshots</url>
         <layout>default</layout>
     </snapshotRepository>
     <repository>
         <id>dnet45-releases</id>
-        <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases</url>
+        <url>https://maven.d4science.org/nexus/content/repositories/dnet45-releases</url>
     </repository>
 </distributionManagement>

@@ -1,5 +1,5 @@

-package eu.dnetlib.dhp.oa.graph.raw.common;
+package eu.dnetlib.dhp.schema.oaf;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -13,19 +13,43 @@ import java.util.stream.Collectors;

 import org.apache.commons.lang3.StringUtils;

-import eu.dnetlib.dhp.schema.oaf.DataInfo;
-import eu.dnetlib.dhp.schema.oaf.ExtraInfo;
-import eu.dnetlib.dhp.schema.oaf.Field;
-import eu.dnetlib.dhp.schema.oaf.Journal;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.OAIProvenance;
-import eu.dnetlib.dhp.schema.oaf.OriginDescription;
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.utils.DHPUtils;

 public class OafMapperUtils {

+  public static Oaf merge(final Oaf o1, final Oaf o2) {
+    if (ModelSupport.isSubClass(o1, OafEntity.class)) {
+      if (ModelSupport.isSubClass(o1, Result.class)) {
+        return mergeResults((Result) o1, (Result) o2);
+      } else if (ModelSupport.isSubClass(o1, Datasource.class)) {
+        ((Datasource) o1).mergeFrom((Datasource) o2);
+      } else if (ModelSupport.isSubClass(o1, Organization.class)) {
+        ((Organization) o1).mergeFrom((Organization) o2);
+      } else if (ModelSupport.isSubClass(o1, Project.class)) {
+        ((Project) o1).mergeFrom((Project) o2);
+      } else {
+        throw new RuntimeException("invalid OafEntity subtype:" + o1.getClass().getCanonicalName());
+      }
+    } else if (ModelSupport.isSubClass(o1, Relation.class)) {
+      ((Relation) o1).mergeFrom((Relation) o2);
+    } else {
+      throw new RuntimeException("invalid Oaf type:" + o1.getClass().getCanonicalName());
+    }
+    return o1;
+  }
+
+  public static Result mergeResults(Result r1, Result r2) {
+    if (new ResultTypeComparator().compare(r1, r2) < 0) {
+      r1.mergeFrom(r2);
+      return r1;
+    } else {
+      r2.mergeFrom(r1);
+      return r2;
+    }
+  }
+
   public static KeyValue keyValue(final String k, final String v) {
     final KeyValue kv = new KeyValue();
     kv.setKey(k);
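Note: a minimal usage sketch of the merge helpers added above. The two record variables are hypothetical, and both are assumed to carry a populated resulttype qualifier, since ResultTypeComparator (next hunk) reads resulttype.classid:

    // two records mapped to the same OpenAIRE identifier, e.g. one publication and one dataset
    Result merged = OafMapperUtils.mergeResults(publicationRecord, datasetRecord);
    // ResultTypeComparator ranks the publication result type first, so publicationRecord is kept
    // and enriched via mergeFrom(datasetRecord); OafMapperUtils.merge(...) dispatches Result
    // arguments to mergeResults and other Oaf subtypes to their own mergeFrom implementations.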
@@ -0,0 +1,49 @@
+
+package eu.dnetlib.dhp.schema.oaf;
+
+import java.util.Comparator;
+
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+
+public class ResultTypeComparator implements Comparator<Result> {
+
+  @Override
+  public int compare(Result left, Result right) {
+
+    if (left == null && right == null)
+      return 0;
+    if (left == null)
+      return 1;
+    if (right == null)
+      return -1;
+
+    String lClass = left.getResulttype().getClassid();
+    String rClass = right.getResulttype().getClassid();
+
+    if (lClass.equals(rClass))
+      return 0;
+
+    if (lClass.equals(ModelConstants.PUBLICATION_RESULTTYPE_CLASSID))
+      return -1;
+    if (rClass.equals(ModelConstants.PUBLICATION_RESULTTYPE_CLASSID))
+      return 1;
+
+    if (lClass.equals(ModelConstants.DATASET_RESULTTYPE_CLASSID))
+      return -1;
+    if (rClass.equals(ModelConstants.DATASET_RESULTTYPE_CLASSID))
+      return 1;
+
+    if (lClass.equals(ModelConstants.SOFTWARE_RESULTTYPE_CLASSID))
+      return -1;
+    if (rClass.equals(ModelConstants.SOFTWARE_RESULTTYPE_CLASSID))
+      return 1;
+
+    if (lClass.equals(ModelConstants.ORP_RESULTTYPE_CLASSID))
+      return -1;
+    if (rClass.equals(ModelConstants.ORP_RESULTTYPE_CLASSID))
+      return 1;
+
+    // Else (but unlikely), lexicographical ordering will do.
+    return lClass.compareTo(rClass);
+  }
+}
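Note: the comparator defines a total order over result types: publication before dataset before software before other research product, with nulls last and a lexicographical fallback. A hedged sketch, assuming three Result instances whose resulttype qualifiers are set (variable names hypothetical):

    List<Result> results = new ArrayList<>(Arrays.asList(softwareRecord, datasetRecord, publicationRecord));
    results.sort(new ResultTypeComparator());
    // order is now: publicationRecord, datasetRecord, softwareRecord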
@@ -5,6 +5,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
+import java.util.List;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;

@@ -15,9 +16,15 @@ import org.apache.commons.codec.binary.Hex;
 import com.jayway.jsonpath.JsonPath;

 import net.minidev.json.JSONArray;
+import scala.collection.JavaConverters;
+import scala.collection.Seq;

 public class DHPUtils {

+  public static Seq<String> toSeq(List<String> list) {
+    return JavaConverters.asScalaIteratorConverter(list.iterator()).asScala().toSeq();
+  }
+
   public static String md5(final String s) {
     try {
       final MessageDigest md = MessageDigest.getInstance("MD5");
@@ -3,14 +3,12 @@ package eu.dnetlib.dhp.oa.graph.clean;

 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

-import java.io.BufferedInputStream;
-import java.util.Objects;
 import java.util.Optional;
-import java.util.stream.Collectors;

 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
@@ -23,11 +21,9 @@ import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.oa.graph.raw.AbstractMdRecordToOafMapper;
-import eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils;
 import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.oaf.*;
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+import eu.dnetlib.dhp.schema.oaf.OafEntity;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@@ -75,12 +71,12 @@ public class CleanGraphSparkJob {
       conf,
       isSparkSessionManaged,
       spark -> {
-        removeOutputDir(spark, outputPath);
-        fixGraphTable(spark, vocs, inputPath, entityClazz, outputPath);
+        HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
+        cleanGraphTable(spark, vocs, inputPath, entityClazz, outputPath);
       });
   }

-  private static <T extends Oaf> void fixGraphTable(
+  private static <T extends Oaf> void cleanGraphTable(
     SparkSession spark,
     VocabularyGroup vocs,
     String inputPath,
@@ -106,13 +102,15 @@ public class CleanGraphSparkJob {
     return spark
       .read()
       .textFile(inputEntityPath)
+      .filter((FilterFunction<String>) s -> isEntityType(s, clazz))
+      .map((MapFunction<String, String>) s -> StringUtils.substringAfter(s, "|"), Encoders.STRING())
       .map(
         (MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz),
         Encoders.bean(clazz));
   }

-  private static void removeOutputDir(SparkSession spark, String path) {
-    HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
+  private static <T extends Oaf> boolean isEntityType(final String s, final Class<T> clazz) {
+    return StringUtils.substringBefore(s, "|").equals(clazz.getName());
   }

 }
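Note on the intermediate format consumed above: GroupEntitiesAndRelationsSparkJob (introduced further down) writes the grouped graph as plain-text lines of the form <fully qualified class name>|<JSON record>, so each per-table cleaning job keeps only the lines of its own type via isEntityType() and strips the prefix with substringAfter(s, "|"). A hypothetical line, payload abbreviated:

    eu.dnetlib.dhp.schema.oaf.Publication|{"id":"...","resulttype":{"classid":"publication"}}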
@@ -11,7 +11,6 @@ import org.apache.commons.lang3.StringUtils;
 import com.clearspring.analytics.util.Lists;

 import eu.dnetlib.dhp.oa.graph.raw.AbstractMdRecordToOafMapper;
-import eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.*;

@@ -115,7 +114,7 @@ public class CleaningFunctions {
       .stream()
       .filter(Objects::nonNull)
       .filter(sp -> StringUtils.isNotBlank(StringUtils.trim(sp.getValue())))
-      .filter(sp -> NONE.equalsIgnoreCase(sp.getValue()))
+      .filter(sp -> !NONE.equalsIgnoreCase(sp.getValue().trim()))
       .filter(sp -> Objects.nonNull(sp.getQualifier()))
       .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
       .map(sp -> {
@@ -0,0 +1,206 @@
+
+package eu.dnetlib.dhp.oa.graph.clean;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.utils.DHPUtils.toSeq;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.*;
+import org.apache.spark.sql.expressions.Aggregator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.jayway.jsonpath.Configuration;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.Option;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.*;
+import scala.Tuple2;
+
+/**
+ * Groups the graph content by entity identifier to ensure ID uniqueness
+ */
+public class GroupEntitiesAndRelationsSparkJob {
+
+  private static final Logger log = LoggerFactory.getLogger(GroupEntitiesAndRelationsSparkJob.class);
+
+  private final static String ID_JPATH = "$.id";
+
+  private final static String SOURCE_JPATH = "$.source";
+
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+  public static void main(String[] args) throws Exception {
+
+    String jsonConfiguration = IOUtils
+      .toString(
+        GroupEntitiesAndRelationsSparkJob.class
+          .getResourceAsStream(
+            "/eu/dnetlib/dhp/oa/graph/group_graph_entities_parameters.json"));
+    final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+    parser.parseArgument(args);
+
+    Boolean isSparkSessionManaged = Optional
+      .ofNullable(parser.get("isSparkSessionManaged"))
+      .map(Boolean::valueOf)
+      .orElse(Boolean.TRUE);
+    log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+    String graphInputPath = parser.get("graphInputPath");
+    log.info("graphInputPath: {}", graphInputPath);
+
+    String outputPath = parser.get("outputPath");
+    log.info("outputPath: {}", outputPath);
+
+    SparkConf conf = new SparkConf();
+    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+    conf.registerKryoClasses(ModelSupport.getOafModelClasses());
+
+    runWithSparkSession(
+      conf,
+      isSparkSessionManaged,
+      spark -> {
+        HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
+        groupEntitiesAndRelations(spark, graphInputPath, outputPath);
+      });
+  }
+
+  private static void groupEntitiesAndRelations(
+    SparkSession spark,
+    String inputPath,
+    String outputPath) {
+
+    TypedColumn<Oaf, Oaf> aggregator = new GroupingAggregator().toColumn();
+    final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+    spark
+      .read()
+      .textFile(toSeq(listPaths(inputPath, sc)))
+      .map((MapFunction<String, Oaf>) s -> parseOaf(s), Encoders.kryo(Oaf.class))
+      .filter((FilterFunction<Oaf>) oaf -> StringUtils.isNotBlank(ModelSupport.idFn().apply(oaf)))
+      .groupByKey((MapFunction<Oaf, String>) oaf -> ModelSupport.idFn().apply(oaf), Encoders.STRING())
+      .agg(aggregator)
+      .map(
+        (MapFunction<Tuple2<String, Oaf>, String>) t -> t._2().getClass().getName() +
+          "|" + OBJECT_MAPPER.writeValueAsString(t._2()),
+        Encoders.STRING())
+      .write()
+      .option("compression", "gzip")
+      .mode(SaveMode.Overwrite)
+      .text(outputPath);
+  }
+
+  public static class GroupingAggregator extends Aggregator<Oaf, Oaf, Oaf> {
+
+    @Override
+    public Oaf zero() {
+      return null;
+    }
+
+    @Override
+    public Oaf reduce(Oaf b, Oaf a) {
+      return mergeAndGet(b, a);
+    }
+
+    private Oaf mergeAndGet(Oaf b, Oaf a) {
+      if (Objects.nonNull(a) && Objects.nonNull(b)) {
+        return OafMapperUtils.merge(b, a);
+      }
+      return Objects.isNull(a) ? b : a;
+    }
+
+    @Override
+    public Oaf merge(Oaf b, Oaf a) {
+      return mergeAndGet(b, a);
+    }
+
+    @Override
+    public Oaf finish(Oaf j) {
+      return j;
+    }
+
+    @Override
+    public Encoder<Oaf> bufferEncoder() {
+      return Encoders.kryo(Oaf.class);
+    }
+
+    @Override
+    public Encoder<Oaf> outputEncoder() {
+      return Encoders.kryo(Oaf.class);
+    }
+
+  }
+
+  private static Oaf parseOaf(String s) {
+
+    DocumentContext dc = JsonPath
+      .parse(s, Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS));
+    final String id = dc.read(ID_JPATH);
+    if (StringUtils.isNotBlank(id)) {
+
+      String prefix = StringUtils.substringBefore(id, "|");
+      switch (prefix) {
+        case "10":
+          return parse(s, Datasource.class);
+        case "20":
+          return parse(s, Organization.class);
+        case "40":
+          return parse(s, Project.class);
+        case "50":
+          String resultType = dc.read("$.resulttype.classid");
+          switch (resultType) {
+            case "publication":
+              return parse(s, Publication.class);
+            case "dataset":
+              return parse(s, eu.dnetlib.dhp.schema.oaf.Dataset.class);
+            case "software":
+              return parse(s, Software.class);
+            case "other":
+              return parse(s, OtherResearchProduct.class);
+            default:
+              throw new IllegalArgumentException(String.format("invalid resultType: '%s'", resultType));
+          }
+        default:
+          throw new IllegalArgumentException(String.format("invalid id prefix: '%s'", prefix));
+      }
+    } else {
+      String source = dc.read(SOURCE_JPATH);
+      if (StringUtils.isNotBlank(source)) {
+        return parse(s, Relation.class);
+      } else {
+        throw new IllegalArgumentException(String.format("invalid oaf: '%s'", s));
+      }
+    }
+  }
+
+  private static <T extends Oaf> Oaf parse(String s, Class<T> clazz) {
+    try {
+      return OBJECT_MAPPER.readValue(s, clazz);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private static List<String> listPaths(String inputPath, JavaSparkContext sc) {
+    return HdfsSupport
+      .listFiles(inputPath, sc.hadoopConfiguration())
+      .stream()
+      .collect(Collectors.toList());
+  }
+
+}
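Note: GroupingAggregator folds all records sharing the same identifier into a single Oaf. A behaviour sketch, assuming a and b are two records grouped under the same id:

    // zero() is null, so the first record of a group is kept as-is
    reduce(null, a)  ->  a
    // later records are folded through the merge logic added to OafMapperUtils
    reduce(a, b)     ->  OafMapperUtils.merge(a, b)
    // merge(bufferA, bufferB) combines partial aggregates with the same null-safe rule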
@@ -33,9 +33,9 @@ import scala.Tuple2;
 * are picked preferring those from the BETA aggregator rather then from PROD. The identity of a relationship is defined
 * by eu.dnetlib.dhp.schema.common.ModelSupport#idFn()
 */
-public class MergeGraphSparkJob {
+public class MergeGraphTableSparkJob {

-  private static final Logger log = LoggerFactory.getLogger(CleanGraphSparkJob.class);
+  private static final Logger log = LoggerFactory.getLogger(MergeGraphTableSparkJob.class);

   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

@@ -1,8 +1,8 @@

 package eu.dnetlib.dhp.oa.graph.raw;

-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.*;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.*;

 import java.util.*;

@@ -29,16 +29,7 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-import eu.dnetlib.dhp.schema.oaf.Datasource;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
-import eu.dnetlib.dhp.schema.oaf.Organization;
-import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
-import eu.dnetlib.dhp.schema.oaf.Project;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-import eu.dnetlib.dhp.schema.oaf.Software;
+import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;
@@ -78,7 +69,7 @@ public class GenerateEntitiesApplication {

     final SparkConf conf = new SparkConf();
     runWithSparkSession(conf, isSparkSessionManaged, spark -> {
-      removeOutputDir(spark, targetPath);
+      HdfsSupport.remove(targetPath, spark.sparkContext().hadoopConfiguration());
       generateEntities(spark, vocs, sourcePaths, targetPath);
     });
   }
@@ -92,7 +83,7 @@ public class GenerateEntitiesApplication {
     final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
     final List<String> existingSourcePaths = Arrays
       .stream(sourcePaths.split(","))
-      .filter(p -> exists(sc, p))
+      .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
       .collect(Collectors.toList());

     log.info("Generate entities from files:");
@@ -113,7 +104,7 @@ public class GenerateEntitiesApplication {

     inputRdd
       .mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
-      .reduceByKey((o1, o2) -> merge(o1, o2))
+      .reduceByKey((o1, o2) -> OafMapperUtils.merge(o1, o2))
       .map(Tuple2::_2)
       .map(
         oaf -> oaf.getClass().getSimpleName().toLowerCase()
@@ -122,17 +113,6 @@ public class GenerateEntitiesApplication {
       .saveAsTextFile(targetPath, GzipCodec.class);
   }

-  private static Oaf merge(final Oaf o1, final Oaf o2) {
-    if (ModelSupport.isSubClass(o1, OafEntity.class)) {
-      ((OafEntity) o1).mergeFrom((OafEntity) o2);
-    } else if (ModelSupport.isSubClass(o1, Relation.class)) {
-      ((Relation) o1).mergeFrom((Relation) o2);
-    } else {
-      throw new RuntimeException("invalid Oaf type:" + o1.getClass().getCanonicalName());
-    }
-    return o1;
-  }
-
   private static List<Oaf> convertToListOaf(
     final String id,
     final String s,
@@ -181,17 +161,4 @@ public class GenerateEntitiesApplication {
     }
   }

-  private static boolean exists(final JavaSparkContext context, final String pathToFile) {
-    try {
-      final FileSystem hdfs = FileSystem.get(context.hadoopConfiguration());
-      final Path path = new Path(pathToFile);
-      return hdfs.exists(path);
-    } catch (final IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private static void removeOutputDir(final SparkSession spark, final String path) {
-    HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
-  }
 }
@@ -1,15 +1,6 @@

 package eu.dnetlib.dhp.oa.graph.raw;

-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.asString;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.dataInfo;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.journal;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.DATASET_DEFAULT_RESULTTYPE;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.DATASOURCE_ORGANIZATION;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PROVENANCE_ACTIONS;
@@ -32,6 +23,7 @@ import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_PROJECT;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_RESULT;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.USER_CLAIM;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.*;

 import java.io.Closeable;
 import java.io.IOException;
@@ -527,9 +519,12 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
     final Boolean deletedbyinference = rs.getBoolean("deletedbyinference");
     final String inferenceprovenance = rs.getString("inferenceprovenance");
     final Boolean inferred = rs.getBoolean("inferred");
-    final String trust = rs.getString("trust");
+
+    final double trust = rs.getDouble("trust");
+
     return dataInfo(
-      deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust);
+      deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION,
+      String.format("%.3f", trust));
   }

   private Qualifier prepareQualifierSplitting(final String s) {
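Note on the trust change above: the column is now read as a numeric value and only rendered as a string when the DataInfo is built. A minimal sketch (the value is hypothetical):

    double trust = 0.9;                               // rs.getDouble("trust")
    String rendered = String.format("%.3f", trust);   // "0.900" under a dot-decimal default locale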
@@ -1,10 +1,10 @@

 package eu.dnetlib.dhp.oa.graph.raw;

-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.createOpenaireId;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.field;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.structuredProperty;

 import java.util.ArrayList;
 import java.util.List;
@@ -1,10 +1,10 @@

 package eu.dnetlib.dhp.oa.graph.raw;

-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.createOpenaireId;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.field;
+import static eu.dnetlib.dhp.schema.oaf.OafMapperUtils.structuredProperty;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -10,6 +10,7 @@ import org.apache.commons.lang3.StringUtils;

 import com.google.common.collect.Maps;

+import eu.dnetlib.dhp.schema.oaf.OafMapperUtils;
 import eu.dnetlib.dhp.schema.oaf.Qualifier;

 public class Vocabulary implements Serializable {
@@ -7,6 +7,7 @@ import java.util.stream.Collectors;

 import org.apache.commons.lang3.StringUtils;

+import eu.dnetlib.dhp.schema.oaf.OafMapperUtils;
 import eu.dnetlib.dhp.schema.oaf.Qualifier;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -50,12 +50,36 @@
     </property>
 </parameters>

-<start to="fork_clean_graph"/>
+<start to="group_entities"/>

 <kill name="Kill">
     <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 </kill>

+<action name="group_entities">
+    <spark xmlns="uri:oozie:spark-action:0.2">
+        <master>yarn</master>
+        <mode>cluster</mode>
+        <name>group graph entities and relations</name>
+        <class>eu.dnetlib.dhp.oa.graph.clean.GroupEntitiesAndRelationsSparkJob</class>
+        <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
+        <spark-opts>
+            --executor-cores=${sparkExecutorCores}
+            --executor-memory=${sparkExecutorMemory}
+            --driver-memory=${sparkDriverMemory}
+            --conf spark.extraListeners=${spark2ExtraListeners}
+            --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+            --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+            --conf spark.sql.shuffle.partitions=7680
+        </spark-opts>
+        <arg>--graphInputPath</arg><arg>${graphInputPath}</arg>
+        <arg>--outputPath</arg><arg>${workingDir}/grouped_entities</arg>
+    </spark>
+    <ok to="fork_clean_graph"/>
+    <error to="Kill"/>
+</action>
+
 <fork name="fork_clean_graph">
     <path start="clean_publication"/>
     <path start="clean_dataset"/>
@@ -84,7 +108,7 @@
             --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/publication</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/publication</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -110,7 +134,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/dataset</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/dataset</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -136,7 +160,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/otherresearchproduct</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/otherresearchproduct</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -162,7 +186,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/software</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/software</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -188,7 +212,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/datasource</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/datasource</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -214,7 +238,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/organization</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/organization</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -240,7 +264,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/project</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/project</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -266,7 +290,7 @@
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.shuffle.partitions=7680
         </spark-opts>
-        <arg>--inputPath</arg><arg>${graphInputPath}/relation</arg>
+        <arg>--inputPath</arg><arg>${workingDir}/grouped_entities</arg>
         <arg>--outputPath</arg><arg>${graphOutputPath}/relation</arg>
         <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
         <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
@@ -0,0 +1,20 @@
+[
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "gin",
+    "paramLongName": "graphInputPath",
+    "paramDescription": "the graph root path",
+    "paramRequired": true
+  },
+  {
+    "paramName": "out",
+    "paramLongName": "outputPath",
+    "paramDescription": "the output merged graph root path",
+    "paramRequired": true
+  }
+]
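Note: these paramLongName entries match the <arg> switches passed by the new group_entities action above. A hypothetical invocation of GroupEntitiesAndRelationsSparkJob would therefore carry:

    --graphInputPath ${graphInputPath} --outputPath ${workingDir}/grouped_entities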
@@ -2,11 +2,11 @@

 <parameters>
     <property>
-        <name>betaInputGgraphPath</name>
+        <name>betaInputGraphPath</name>
         <description>the beta graph root path</description>
     </property>
     <property>
-        <name>prodInputGgraphPath</name>
+        <name>prodInputGraphPath</name>
         <description>the production graph root path</description>
     </property>
     <property>
@@ -76,7 +76,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge publications</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -88,8 +88,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/publication</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/publication</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/publication</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/publication</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/publication</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -103,7 +103,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge datasets</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -115,8 +115,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/dataset</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/dataset</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/dataset</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/dataset</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/dataset</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -130,7 +130,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge otherresearchproducts</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -142,8 +142,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/otherresearchproduct</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/otherresearchproduct</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/otherresearchproduct</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/otherresearchproduct</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/otherresearchproduct</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -157,7 +157,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge softwares</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -169,8 +169,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/software</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/software</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/software</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/software</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/software</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -184,7 +184,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge datasources</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
    <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -196,8 +196,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/datasource</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/datasource</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/datasource</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/datasource</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/datasource</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -211,7 +211,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge organizations</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -223,8 +223,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/organization</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/organization</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/organization</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/organization</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/organization</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -238,7 +238,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge projects</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -250,8 +250,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/project</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/project</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/project</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/project</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/project</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
     <arg>--priority</arg><arg>${priority}</arg>
@@ -265,7 +265,7 @@
     <master>yarn</master>
     <mode>cluster</mode>
     <name>Merge relations</name>
-    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob</class>
+    <class>eu.dnetlib.dhp.oa.graph.merge.MergeGraphTableSparkJob</class>
     <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
@@ -277,8 +277,8 @@
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         --conf spark.sql.shuffle.partitions=7680
     </spark-opts>
-    <arg>--betaInputPath</arg><arg>${betaInputGgraphPath}/relation</arg>
-    <arg>--prodInputPath</arg><arg>${prodInputGgraphPath}/relation</arg>
+    <arg>--betaInputPath</arg><arg>${betaInputGraphPath}/relation</arg>
+    <arg>--prodInputPath</arg><arg>${prodInputGraphPath}/relation</arg>
     <arg>--outputPath</arg><arg>${graphOutputPath}/relation</arg>
     <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
     <arg>--priority</arg><arg>${priority}</arg>
|
@ -15,7 +15,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Datasource;
|
import eu.dnetlib.dhp.schema.oaf.Datasource;
|
||||||
|
|
||||||
public class MergeGraphSparkJobTest {
|
public class MergeGraphTableSparkJobTest {
|
||||||
|
|
||||||
private ObjectMapper mapper;
|
private ObjectMapper mapper;
|
||||||
|
|
||||||
|
@ -28,7 +28,7 @@ public class MergeGraphSparkJobTest {
|
||||||
public void testMergeDatasources() throws IOException {
|
public void testMergeDatasources() throws IOException {
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"openaire-cris_1.1",
|
"openaire-cris_1.1",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_cris.json"),
|
d("datasource_cris.json"),
|
||||||
d("datasource_UNKNOWN.json"))
|
d("datasource_UNKNOWN.json"))
|
||||||
|
@ -36,7 +36,7 @@ public class MergeGraphSparkJobTest {
|
||||||
.getClassid());
|
.getClassid());
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"openaire-cris_1.1",
|
"openaire-cris_1.1",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_UNKNOWN.json"),
|
d("datasource_UNKNOWN.json"),
|
||||||
d("datasource_cris.json"))
|
d("datasource_cris.json"))
|
||||||
|
@ -44,7 +44,7 @@ public class MergeGraphSparkJobTest {
|
||||||
.getClassid());
|
.getClassid());
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"driver-openaire2.0",
|
"driver-openaire2.0",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_native.json"),
|
d("datasource_native.json"),
|
||||||
d("datasource_driver-openaire2.0.json"))
|
d("datasource_driver-openaire2.0.json"))
|
||||||
|
@ -52,7 +52,7 @@ public class MergeGraphSparkJobTest {
|
||||||
.getClassid());
|
.getClassid());
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"driver-openaire2.0",
|
"driver-openaire2.0",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_driver-openaire2.0.json"),
|
d("datasource_driver-openaire2.0.json"),
|
||||||
d("datasource_native.json"))
|
d("datasource_native.json"))
|
||||||
|
@ -60,7 +60,7 @@ public class MergeGraphSparkJobTest {
|
||||||
.getClassid());
|
.getClassid());
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"openaire4.0",
|
"openaire4.0",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_notCompatible.json"),
|
d("datasource_notCompatible.json"),
|
||||||
d("datasource_openaire4.0.json"))
|
d("datasource_openaire4.0.json"))
|
||||||
|
@ -68,7 +68,7 @@ public class MergeGraphSparkJobTest {
|
||||||
.getClassid());
|
.getClassid());
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"notCompatible",
|
"notCompatible",
|
||||||
MergeGraphSparkJob
|
MergeGraphTableSparkJob
|
||||||
.mergeDatasource(
|
.mergeDatasource(
|
||||||
d("datasource_notCompatible.json"),
|
d("datasource_notCompatible.json"),
|
||||||
d("datasource_UNKNOWN.json"))
|
d("datasource_UNKNOWN.json"))
|
|
@@ -0,0 +1,99 @@

package eu.dnetlib.dhp.oa.graph.raw;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.lenient;

import java.io.IOException;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import eu.dnetlib.dhp.oa.graph.clean.CleaningFunctionTest;
import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@ExtendWith(MockitoExtension.class)
public class GenerateEntitiesApplicationTest {

    @Mock
    private ISLookUpService isLookUpService;

    @Mock
    private VocabularyGroup vocs;

    @BeforeEach
    public void setUp() throws IOException, ISLookUpException {

        lenient().when(isLookUpService.quickSearchProfile(VocabularyGroup.VOCABULARIES_XQUERY)).thenReturn(vocs());
        lenient()
            .when(isLookUpService.quickSearchProfile(VocabularyGroup.VOCABULARY_SYNONYMS_XQUERY))
            .thenReturn(synonyms());

        vocs = VocabularyGroup.loadVocsFromIS(isLookUpService);
    }

    @Test
    public void testMergeResult() throws IOException {
        Result publication = getResult("oaf_record.xml", Publication.class);
        Result dataset = getResult("odf_dataset.xml", Dataset.class);
        Result software = getResult("odf_software.xml", Software.class);
        Result orp = getResult("oaf_orp.xml", OtherResearchProduct.class);

        verifyMerge(publication, dataset, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);
        verifyMerge(dataset, publication, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);

        verifyMerge(publication, software, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);
        verifyMerge(software, publication, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);

        verifyMerge(publication, orp, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);
        verifyMerge(orp, publication, Publication.class, ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);

        verifyMerge(dataset, software, Dataset.class, ModelConstants.DATASET_RESULTTYPE_CLASSID);
        verifyMerge(software, dataset, Dataset.class, ModelConstants.DATASET_RESULTTYPE_CLASSID);

        verifyMerge(dataset, orp, Dataset.class, ModelConstants.DATASET_RESULTTYPE_CLASSID);
        verifyMerge(orp, dataset, Dataset.class, ModelConstants.DATASET_RESULTTYPE_CLASSID);

        verifyMerge(software, orp, Software.class, ModelConstants.SOFTWARE_RESULTTYPE_CLASSID);
        verifyMerge(orp, software, Software.class, ModelConstants.SOFTWARE_RESULTTYPE_CLASSID);
    }

    protected <T extends Result> void verifyMerge(Result publication, Result dataset, Class<T> clazz,
        String resultType) {
        final Result merge = OafMapperUtils.mergeResults(publication, dataset);
        assertTrue(clazz.isAssignableFrom(merge.getClass()));
        assertEquals(resultType, merge.getResulttype().getClassid());
    }

    protected <T extends Result> Result getResult(String xmlFileName, Class<T> clazz) throws IOException {
        final String xml = IOUtils.toString(getClass().getResourceAsStream(xmlFileName));
        return new OdfToOafMapper(vocs, false)
            .processMdRecord(xml)
            .stream()
            .filter(s -> clazz.isAssignableFrom(s.getClass()))
            .map(s -> (Result) s)
            .findFirst()
            .get();
    }

    private List<String> vocs() throws IOException {
        return IOUtils
            .readLines(CleaningFunctionTest.class.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/clean/terms.txt"));
    }

    private List<String> synonyms() throws IOException {
        return IOUtils
            .readLines(CleaningFunctionTest.class.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/clean/synonyms.txt"));
    }

}
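Reviewer note (not part of the diff): the assertions in testMergeResult above pin down a precedence among result types when OafMapperUtils.mergeResults decides which record survives a merge: publication wins over dataset, software and other research products; dataset wins over software and other research products; software wins over other research products. Below is a minimal, self-contained sketch of that ordering; the class and enum names are invented for illustration and only mirror the outcomes the test asserts (in the real code the ordering appears to be encoded by ResultTypeComparator).

import java.util.List;

// Hypothetical illustration of the precedence GenerateEntitiesApplicationTest expects
// from OafMapperUtils.mergeResults; all names here are invented.
public class ResultTypePrecedenceSketch {

    enum ResultType {
        PUBLICATION, DATASET, SOFTWARE, OTHER_RESEARCH_PRODUCT
    }

    // Lower index wins the merge, mirroring the test's expected winners.
    static final List<ResultType> PRECEDENCE = List
        .of(ResultType.PUBLICATION, ResultType.DATASET, ResultType.SOFTWARE, ResultType.OTHER_RESEARCH_PRODUCT);

    static ResultType merge(ResultType a, ResultType b) {
        return PRECEDENCE.indexOf(a) <= PRECEDENCE.indexOf(b) ? a : b;
    }

    public static void main(String[] args) {
        // Mirrors verifyMerge(publication, dataset, Publication.class, ...)
        System.out.println(merge(ResultType.PUBLICATION, ResultType.DATASET)); // PUBLICATION
        // Mirrors verifyMerge(software, orp, Software.class, ...)
        System.out.println(merge(ResultType.SOFTWARE, ResultType.OTHER_RESEARCH_PRODUCT)); // SOFTWARE
    }
}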
@@ -27,14 +27,8 @@ import org.mockito.junit.jupiter.MockitoExtension;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

-import eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils;
import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
-import eu.dnetlib.dhp.schema.oaf.Datasource;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.Organization;
-import eu.dnetlib.dhp.schema.oaf.Project;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.*;

@ExtendWith(MockitoExtension.class)
public class MigrateDbEntitiesApplicationTest {
@@ -31,8 +31,8 @@
},
{
    "field": "trust",
-    "type": "string",
-    "value": "0.9"
+    "type": "double",
+    "value": 0.9
},
{
    "field": "inferenceprovenance",
@@ -114,8 +114,8 @@
},
{
    "field": "trust",
-    "type": "string",
-    "value": "0.9"
+    "type": "double",
+    "value": 0.9
},
{
    "field": "inferenceprovenance",
@@ -0,0 +1,84 @@
<?xml version="1.0" encoding="UTF-8"?>
<record xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:dr="http://www.driver-repository.eu/namespace/dr"
    xmlns:dri="http://www.driver-repository.eu/namespace/dri"
    xmlns:oaf="http://namespace.openaire.eu/oaf"
    xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <header xmlns="http://namespace.openaire.eu/">
        <dri:objIdentifier>pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2</dri:objIdentifier>
        <dri:recordIdentifier>10.3897/oneeco.2.e13718</dri:recordIdentifier>
        <dri:dateOfCollection/>
        <dri:mdFormat/>
        <dri:mdFormatInterpretation/>
        <dri:repositoryId/>
        <dr:objectIdentifier/>
        <dr:dateOfCollection>2020-03-23T00:20:51.392Z</dr:dateOfCollection>
        <dr:dateOfTransformation>2020-03-23T00:26:59.078Z</dr:dateOfTransformation>
        <oaf:datasourceprefix>pensoft_____</oaf:datasourceprefix>
    </header>
    <metadata xmlns="http://namespace.openaire.eu/">
        <dc:title>Ecosystem Service capacity is higher in areas of multiple designation types</dc:title>
        <dc:creator>Nikolaidou,Charitini</dc:creator>
        <dc:creator nameIdentifier="0000-0001-6651-1178" nameIdentifierScheme="ORCID">Votsi,Nefta</dc:creator>
        <dc:creator>Sgardelis,Steanos</dc:creator>
        <dc:creator>Halley,John</dc:creator>
        <dc:creator>Pantis,John</dc:creator>
        <dc:creator>Tsiafouli,Maria</dc:creator>
        <dc:date>2017</dc:date>
        <dc:description>The implementation of the Ecosystem Service (ES) concept into practice might be a challenging task as it has to take into account previous “traditional” policies and approaches that have evaluated nature and biodiversity differently. Among them the Habitat (92/43/EC) and Bird Directives (79/409/EC), the Water Framework Directive (2000/60/EC), and the Noise Directive (2002/49/EC) have led to the evaluation/designation of areas in Europe with different criteria. In this study our goal was to understand how the ES capacity of an area is related to its designation and if areas with multiple designations have higher capacity in providing ES. We selected four catchments in Greece with a great variety of characteristics covering over 25% of the national territory. Inside the catchments we assessed the ES capacity (following the methodology of Burkhard et al. 2009) of areas designated as Natura 2000 sites, Quiet areas and Wetlands or Water bodies and found those areas that have multiple designations. Data were analyzed by GLM to reveal differences regarding the ES capacity among the different types of areas. We also investigated by PCA synergies and trade-offs among different kinds of ES and tested for correlations among landscape properties, such as elevation, aspect and slope and the ES potential. Our results show that areas with different types or multiple designations have a different capacity in providing ES. Areas of one designation type (Protected or Quiet Areas) had in general intermediate scores in most ES but scores were higher compared to areas with no designation, which displayed stronger capacity in provisioning services. Among Protected Areas and Quiet Areas the latter scored better in general. Areas that combined both designation types (Protected and Quiet Areas) showed the highest capacity in 13 out of 29 ES, that were mostly linked with natural and forest ecosystems. We found significant synergies among most regulating, supporting and cultural ES which in turn display trade-offs with provisioning services. The different ES are spatially related and display strong correlation with landscape properties, such as elevation and slope. We suggest that the designation status of an area can be used as an alternative tool for environmental policy, indicating the capacity for ES provision. Multiple designations of areas can be used as proxies for locating ES “hotspots”. This integration of “traditional” evaluation and designation and the “newer” ES concept forms a time- and cost-effective way to be adopted by stakeholders and policy-makers in order to start complying with new standards and demands for nature conservation and environmental management.</dc:description>
        <dc:format>text/html</dc:format>
        <dc:identifier>https://doi.org/10.3897/oneeco.2.e13718</dc:identifier>
        <dc:identifier>https://oneecosystem.pensoft.net/article/13718/</dc:identifier>
        <dc:language>eng</dc:language>
        <dc:publisher>Pensoft Publishers</dc:publisher>
        <dc:relation>info:eu-repo/semantics/altIdentifier/eissn/2367-8194</dc:relation>
        <dc:relation>info:eu-repo/grantAgreement/EC/FP7/226852</dc:relation>
        <dc:source>One Ecosystem 2: e13718</dc:source>
        <dc:source>One Ecosystem 2: e13718</dc:source>
        <dc:source>One Ecosystem 2: e13718</dc:source>
        <dc:subject>Ecosystem Services hotspots</dc:subject>
        <dc:subject>Natura 2000</dc:subject>
        <dc:subject>Quiet Protected Areas</dc:subject>
        <dc:subject>Biodiversity</dc:subject>
        <dc:subject>Agriculture</dc:subject>
        <dc:subject>Elevation</dc:subject>
        <dc:subject>Slope</dc:subject>
        <dc:subject>Ecosystem Service trade-offs and synergies</dc:subject>
        <dc:subject> cultural services</dc:subject>
        <dc:subject>provisioning services</dc:subject>
        <dc:subject>regulating services</dc:subject>
        <dc:subject>supporting services</dc:subject>
        <dc:type>Research Article</dc:type>
        <!--<dr:CobjCategory type="publication">0001</dr:CobjCategory>-->
        <dr:CobjCategory>0020</dr:CobjCategory>
        <oaf:dateAccepted>2017-01-01</oaf:dateAccepted>
        <oaf:projectid>corda_______::226852</oaf:projectid>
        <oaf:accessrights>OPEN</oaf:accessrights>
        <oaf:hostedBy id="openaire____::issn226852" name="One Ecosystem"/>
        <oaf:collectedFrom
            id="openaire____::45e3c7b69bcee6cc5fa945c9e183deb9" name="Pensoft"/>
        <oaf:identifier identifierType="doi">10.3897/oneeco.2.e13718</oaf:identifier>
        <oaf:fulltext>https://oneecosystem.pensoft.net/article/13718/</oaf:fulltext>
        <oaf:journal eissn="2367-8194" issn="">One Ecosystem</oaf:journal>
        <oaf:refereed>0001</oaf:refereed>
    </metadata>
    <about xmlns:oai="http://www.openarchives.org/OAI/2.0/">
        <provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
            <originDescription altered="true" harvestDate="2020-03-23T00:20:51.392Z">
                <baseURL>http%3A%2F%2Fzookeys.pensoft.net%2Foai.php</baseURL>
                <identifier>10.3897/oneeco.2.e13718</identifier>
                <datestamp>2017-09-08</datestamp>
                <metadataNamespace>http://www.openarchives.org/OAI/2.0/oai_dc/</metadataNamespace>
            </originDescription>
        </provenance>
        <oaf:datainfo>
            <oaf:inferred>false</oaf:inferred>
            <oaf:deletedbyinference>false</oaf:deletedbyinference>
            <oaf:trust>0.9</oaf:trust>
            <oaf:inferenceprovenance/>
            <oaf:provenanceaction classid="sysimport:crosswalk:repository"
                classname="sysimport:crosswalk:repository"
                schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
        </oaf:datainfo>
    </about>
</record>
@@ -96,8 +96,8 @@
},
{
    "field": "trust",
-    "type": "string",
-    "value": "0.9"
+    "type": "double",
+    "value": 0.9
},
{
    "field": "inferenceprovenance",
@@ -41,8 +41,8 @@
},
{
    "field": "trust",
-    "type": "string",
-    "value": "0.9"
+    "type": "double",
+    "value": 0.9
},
{
    "field": "inferenceprovenance",
@@ -86,8 +86,8 @@
},
{
    "field": "trust",
-    "type": "string",
-    "value": "0.9"
+    "type": "double",
+    "value": 0.9
},
{
    "field": "inferenceprovenance",
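Reviewer note (not part of the diff): the fixture hunks above switch the trust column from a quoted string to a JSON number typed "double". A small, self-contained Jackson sketch (illustration only, not the project's reader; the class name is invented) showing what a consumer observes for each encoding:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustration only: how the old and new fixture encodings of "trust" differ when parsed.
public class TrustFieldSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        JsonNode oldStyle = mapper.readTree("{\"field\": \"trust\", \"type\": \"string\", \"value\": \"0.9\"}");
        JsonNode newStyle = mapper.readTree("{\"field\": \"trust\", \"type\": \"double\", \"value\": 0.9}");

        System.out.println(oldStyle.get("value").isTextual()); // true
        System.out.println(newStyle.get("value").isDouble());  // true
        System.out.println(newStyle.get("value").asDouble());  // 0.9
    }
}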
@@ -2,12 +2,11 @@
package eu.dnetlib.dhp.oa.provision;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.utils.DHPUtils.toSeq;

-import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
@@ -28,13 +27,11 @@ import com.google.common.collect.Maps;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.oa.provision.model.*;
+import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
+import eu.dnetlib.dhp.oa.provision.model.ProvisionModelSupport;
import eu.dnetlib.dhp.oa.provision.utils.ContextMapper;
import eu.dnetlib.dhp.oa.provision.utils.XmlRecordFactory;
-import eu.dnetlib.dhp.schema.oaf.*;
import scala.Tuple2;
-import scala.collection.JavaConverters;
-import scala.collection.Seq;

/**
 * XmlConverterJob converts the JoinedEntities as XML records
@@ -43,8 +40,6 @@ public class XmlConverterJob {

    private static final Logger log = LoggerFactory.getLogger(XmlConverterJob.class);

-    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
    public static final String schemaLocation = "https://www.openaire.eu/schema/1.0/oaf-1.0.xsd";

    public static void main(String[] args) throws Exception {
@@ -129,10 +124,6 @@ public class XmlConverterJob {
        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
    }

-    private static Seq<String> toSeq(List<String> list) {
-        return JavaConverters.asScalaIteratorConverter(list.iterator()).asScala().toSeq();
-    }
-
    private static Map<String, LongAccumulator> prepareAccumulators(SparkContext sc) {
        Map<String, LongAccumulator> accumulators = Maps.newHashMap();
        accumulators
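Reviewer note (not part of the diff): the last hunk drops XmlConverterJob's private toSeq helper in favour of the static import of DHPUtils.toSeq added at the top of the file; the conversion itself is the usual Java-to-Scala bridge. A minimal sketch, assuming scala-library is on the classpath and using an invented class name:

import java.util.Arrays;
import java.util.List;

import scala.collection.JavaConverters;
import scala.collection.Seq;

// Sketch of the Java -> Scala Seq conversion that moved from XmlConverterJob to DHPUtils.toSeq.
public class ToSeqSketch {

    static Seq<String> toSeq(List<String> list) {
        // Same body as the method removed above.
        return JavaConverters.asScalaIteratorConverter(list.iterator()).asScala().toSeq();
    }

    public static void main(String[] args) {
        Seq<String> seq = toSeq(Arrays.asList("a", "b", "c"));
        System.out.println(seq.size()); // 3
    }
}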
pom.xml
@@ -50,7 +50,7 @@
<repository>
    <id>dnet45-releases</id>
    <name>D-Net 45 releases</name>
-    <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases</url>
+    <url>https://maven.d4science.org/nexus/content/repositories/dnet45-releases</url>
    <layout>default</layout>
    <snapshots>
        <enabled>false</enabled>
@@ -651,12 +651,12 @@
<snapshotRepository>
    <id>dnet45-snapshots</id>
    <name>DNet45 Snapshots</name>
-    <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-snapshots</url>
+    <url>https://maven.d4science.org/nexus/content/repositories/dnet45-snapshots</url>
    <layout>default</layout>
</snapshotRepository>
<repository>
    <id>dnet45-releases</id>
-    <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases</url>
+    <url>https://maven.d4science.org/nexus/content/repositories/dnet45-releases</url>
</repository>
</distributionManagement>
<reporting>