Compare commits
91 Commits
master...oaf_countr
Author | SHA1 | Date |
---|---|---|
Claudio Atzori | 538b180fe0 | |
Claudio Atzori | eae88c0fe3 | |
Claudio Atzori | 82fc609c4f | |
Claudio Atzori | 4b978ffa2d | |
Claudio Atzori | fa4b3e6d2b | |
Claudio Atzori | 74e5d05577 | |
Claudio Atzori | 6c3b692f60 | |
Claudio Atzori | e9eb590f87 | |
Claudio Atzori | 9a5b134ddf | |
Claudio Atzori | 069803f34a | |
Claudio Atzori | 71c1f81b54 | |
Claudio Atzori | c3c9bdb59c | |
Claudio Atzori | 91b61687fa | |
Claudio Atzori | 63067d4b24 | |
Claudio Atzori | e0c315b07b | |
Claudio Atzori | 54936b7f42 | |
Michele Artini | e1149eb5c4 | |
Michele Artini | 3f174ad90f | |
Michele Artini | 6ffb1faf09 | |
Giambattista Bloisi | 3f22c101d9 | |
Giambattista Bloisi | 0ff7faad72 | |
Michele Artini | 7faa115ba0 | |
Michele Artini | f9c74c98fa | |
Claudio Atzori | 7ae7e8aa06 | |
Giambattista Bloisi | 664a381d31 | |
Michele Artini | cb29b9773c | |
Michele Artini | 85b844d57e | |
Michele Artini | 455f2e1e07 | |
Michele Artini | 30167aa882 | |
Michele Artini | 88fef367b9 | |
Claudio Atzori | 078169b922 | |
Claudio Atzori | af154d4456 | |
Claudio Atzori | 7863c92466 | |
Claudio Atzori | eb5887cb9a | |
Miriam Baglioni | 5a32bb9578 | |
Miriam Baglioni | 48c052215c | |
Claudio Atzori | db66555ebb | |
Giambattista Bloisi | 9092075760 | |
Claudio Atzori | d4871b31e8 | |
Miriam Baglioni | 5180b6ec8a | |
Miriam Baglioni | 7827a2d66b | |
Miriam Baglioni | fd34372c40 | |
Giambattista Bloisi | 3cd5590f3b | |
Giambattista Bloisi | 56dd05f85c | |
Claudio Atzori | 6fcf872daa | |
Claudio Atzori | 3f07390a58 | |
Sandro La Bruzzo | 7d806a434c | |
Sandro La Bruzzo | e468e99100 | |
Sandro La Bruzzo | b63994dcc4 | |
Sandro La Bruzzo | 915a76a796 | |
Giambattista Bloisi | 773e856550 | |
Sandro La Bruzzo | a712df1e1d | |
Sandro La Bruzzo | b32a9d1994 | |
Michele Artini | 3268570b2c | |
Miriam Baglioni | 72bae7af76 | |
Serafeim Chatzopoulos | f0dc12634b | |
Claudio Atzori | 753c2a72bd | |
Claudio Atzori | a63b091bae | |
Giambattista Bloisi | 85aeff72f1 | |
Giambattista Bloisi | d65285da7f | |
Giambattista Bloisi | 29194472a7 | |
Miriam Baglioni | eca021f4d6 | |
Miriam Baglioni | bdb6bbb365 | |
Claudio Atzori | d85d2df6ad | |
Giambattista Bloisi | b19643f6eb | |
Claudio Atzori | e6bdee86d1 | |
Claudio Atzori | 38c9001147 | |
Claudio Atzori | fd17c1f17c | |
Claudio Atzori | 009dcf6aea | |
Claudio Atzori | bb82052c40 | |
Claudio Atzori | 42f5506306 | |
Alessia Bardi | f2a08d8cc2 | |
Miriam Baglioni | 07a373a0bd | |
Miriam Baglioni | ead08b0dd4 | |
Miriam Baglioni | a5995ab557 | |
Sandro La Bruzzo | 9aebca77a0 | |
Sandro La Bruzzo | 0386f36385 | |
Sandro La Bruzzo | 43e0bba7ed | |
Miriam Baglioni | f7d06dc661 | |
Miriam Baglioni | 6e58d79623 | |
Miriam Baglioni | e0ec800d7e | |
Sandro La Bruzzo | e0753f19da | |
sandro.labruzzo | e328bc0ade | |
Sandro La Bruzzo | 859babf722 | |
Sandro La Bruzzo | 39ebb60b38 | |
Sandro La Bruzzo | 9d5a7c3b22 | |
Sandro La Bruzzo | 8f61063201 | |
Sandro La Bruzzo | 1a42a5c10d | |
Miriam Baglioni | 624f5f3f21 | |
Miriam Baglioni | 354e02e6a9 | |
Miriam Baglioni | b00771c7cc | |
```diff
@@ -145,105 +145,6 @@ public class AuthorMerger {
 		return null;
 	}
 
-	/**
-	 * This method tries to figure out when two author are the same in the contest
-	 * of ORCID enrichment
-	 *
-	 * @param left Author in the OAF entity
-	 * @param right Author ORCID
-	 * @return based on a heuristic on the names of the authors if they are the same.
-	 */
-	public static boolean checkORCIDSimilarity(final Author left, final Author right) {
-		final Person pl = parse(left);
-		final Person pr = parse(right);
-
-		// If one of them didn't have a surname we verify if they have the fullName not empty
-		// and verify if the normalized version is equal
-		if (!(pl.getSurname() != null && pl.getSurname().stream().anyMatch(StringUtils::isNotBlank) &&
-			pr.getSurname() != null && pr.getSurname().stream().anyMatch(StringUtils::isNotBlank))) {
-
-			if (pl.getFullname() != null && !pl.getFullname().isEmpty() && pr.getFullname() != null
-				&& !pr.getFullname().isEmpty()) {
-				return pl
-					.getFullname()
-					.stream()
-					.anyMatch(
-						fl -> pr.getFullname().stream().anyMatch(fr -> normalize(fl).equalsIgnoreCase(normalize(fr))));
-			} else {
-				return false;
-			}
-		}
-		// The Authors have one surname in common
-		if (pl.getSurname().stream().anyMatch(sl -> pr.getSurname().stream().anyMatch(sr -> sr.equalsIgnoreCase(sl)))) {
-
-			// If one of them has only a surname and is the same we can say that they are the same author
-			if ((pl.getName() == null || pl.getName().stream().allMatch(StringUtils::isBlank)) ||
-				(pr.getName() == null || pr.getName().stream().allMatch(StringUtils::isBlank)))
-				return true;
-			// The authors have the same initials of Name in common
-			if (pl
-				.getName()
-				.stream()
-				.anyMatch(
-					nl -> pr
-						.getName()
-						.stream()
-						.anyMatch(nr -> nr.equalsIgnoreCase(nl))))
-				return true;
-		}
-
-		// Sometimes we noticed that publication have author wrote in inverse order Surname, Name
-		// We verify if we have an exact match between name and surname
-		if (pl.getSurname().stream().anyMatch(sl -> pr.getName().stream().anyMatch(nr -> nr.equalsIgnoreCase(sl))) &&
-			pl.getName().stream().anyMatch(nl -> pr.getSurname().stream().anyMatch(sr -> sr.equalsIgnoreCase(nl))))
-			return true;
-		else
-			return false;
-	}
-	//
-
-	/**
-	 * Method to enrich ORCID information in one list of authors based on another list
-	 *
-	 * @param baseAuthor the Author List in the OAF Entity
-	 * @param orcidAuthor The list of ORCID Author intersected
-	 * @return The Author List of the OAF Entity enriched with the orcid Author
-	 */
-	public static List<Author> enrichOrcid(List<Author> baseAuthor, List<Author> orcidAuthor) {
-
-		if (baseAuthor == null || baseAuthor.isEmpty())
-			return orcidAuthor;
-
-		if (orcidAuthor == null || orcidAuthor.isEmpty())
-			return baseAuthor;
-
-		if (baseAuthor.size() == 1 && orcidAuthor.size() > 10)
-			return baseAuthor;
-
-		final List<Author> oAuthor = new ArrayList<>();
-		oAuthor.addAll(orcidAuthor);
-
-		baseAuthor.forEach(ba -> {
-			Optional<Author> aMatch = oAuthor.stream().filter(oa -> checkORCIDSimilarity(ba, oa)).findFirst();
-			if (aMatch.isPresent()) {
-				final Author sameAuthor = aMatch.get();
-				addPid(ba, sameAuthor.getPid());
-				oAuthor.remove(sameAuthor);
-			}
-		});
-		return baseAuthor;
-	}
-
-	private static void addPid(final Author a, final List<StructuredProperty> pids) {
-
-		if (a.getPid() == null) {
-			a.setPid(new ArrayList<>());
-		}
-
-		a.getPid().addAll(pids);
-
-	}
-
 	public static String pidToComparableString(StructuredProperty pid) {
 		final String classid = pid.getQualifier().getClassid() != null ? pid.getQualifier().getClassid().toLowerCase()
 			: "";
```
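The hunk above removes the ORCID-enrichment helpers (`checkORCIDSimilarity`, `enrichOrcid`, `addPid`) from `AuthorMerger`. For reference, a minimal sketch of how the removed entry point was invoked before this change — `publication` and `fetchOrcidAuthorsFor` are hypothetical; only `AuthorMerger.enrichOrcid` and `Author` come from the code above:

```java
// Hypothetical caller of the removed API; Author is the dhp schema class.
List<Author> baseAuthors = publication.getAuthor();            // authors of the OAF entity
List<Author> orcidAuthors = fetchOrcidAuthorsFor(publication); // invented ORCID lookup

// Each base author is matched at most once against the ORCID list through
// checkORCIDSimilarity (shared surname plus matching name/initials, or equal
// normalized full names); on a match the ORCID pid is copied onto the base author.
List<Author> enriched = AuthorMerger.enrichOrcid(baseAuthors, orcidAuthors);
```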
```diff
@@ -1,24 +1,6 @@
 
 package eu.dnetlib.dhp.oa.merge;
 
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-import static org.apache.spark.sql.functions.col;
-import static org.apache.spark.sql.functions.when;
-
-import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ForkJoinPool;
-import java.util.stream.Collectors;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.api.java.function.ReduceFunction;
-import org.apache.spark.sql.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
@@ -26,12 +8,29 @@ import eu.dnetlib.dhp.schema.common.EntityType;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.OafEntity;
 import eu.dnetlib.dhp.schema.oaf.utils.GraphCleaningFunctions;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.api.java.function.ReduceFunction;
+import org.apache.spark.sql.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ForkJoinPool;
+import java.util.stream.Collectors;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static org.apache.spark.sql.functions.col;
+import static org.apache.spark.sql.functions.when;
+
 /**
  * Groups the graph content by entity identifier to ensure ID uniqueness
  */
@@ -135,7 +134,7 @@ public class GroupEntitiesSparkJob {
 					.applyCoarVocabularies(entity, vocs),
 				OAFENTITY_KRYO_ENC)
 			.groupByKey((MapFunction<OafEntity, String>) OafEntity::getId, Encoders.STRING())
-			.reduceGroups((ReduceFunction<OafEntity>) OafMapperUtils::mergeEntities)
+			.reduceGroups((ReduceFunction<OafEntity>) MergeUtils::checkedMerge)
 			.map(
 				(MapFunction<Tuple2<String, OafEntity>, Tuple2<String, OafEntity>>) t -> new Tuple2<>(
 					t._2().getClass().getName(), t._2()),
```
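Aside from the import reordering, the functional change in `GroupEntitiesSparkJob` is one line: the `reduceGroups` function switches from `OafMapperUtils::mergeEntities` to the new `MergeUtils::checkedMerge`. A minimal sketch of that step in isolation, assuming `entities` is a `Dataset<OafEntity>` and `OAFENTITY_KRYO_ENC` is the Kryo `Encoder<OafEntity>` the job already defines:

```java
Dataset<OafEntity> grouped = entities
	// one group per entity identifier
	.groupByKey((MapFunction<OafEntity, String>) OafEntity::getId, Encoders.STRING())
	// checkedMerge dispatches on the concrete type and throws on incompatible
	// classes, replacing the former OafMapperUtils::mergeEntities
	.reduceGroups((ReduceFunction<OafEntity>) MergeUtils::checkedMerge)
	.map((MapFunction<Tuple2<String, OafEntity>, OafEntity>) t -> t._2(), OAFENTITY_KRYO_ENC);
```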
```diff
@@ -312,7 +312,8 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 		}
 
 		if (value instanceof Datasource) {
-			// nothing to evaluate here
+			final Datasource d = (Datasource) value;
+			return Objects.nonNull(d.getOfficialname()) && StringUtils.isNotBlank(d.getOfficialname().getValue());
 		} else if (value instanceof Project) {
 			final Project p = (Project) value;
 			return Objects.nonNull(p.getCode()) && StringUtils.isNotBlank(p.getCode().getValue());
@@ -505,6 +506,8 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 							.filter(Objects::nonNull)
 							.filter(sp -> StringUtils.isNotBlank(sp.getValue()))
 							.map(GraphCleaningFunctions::cleanValue)
+							.sorted((s1, s2) -> s2.getValue().length() - s1.getValue().length())
+							.limit(ModelHardLimits.MAX_ABSTRACTS)
 							.collect(Collectors.toList()));
 			}
 			if (Objects.isNull(r.getResourcetype()) || StringUtils.isBlank(r.getResourcetype().getClassid())) {
```
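The second hunk bounds the abstracts kept on a record: after cleaning, descriptions are ordered by decreasing length and cut at `ModelHardLimits.MAX_ABSTRACTS`, so the longest abstracts survive. The same stream in isolation — `descriptions` stands in for `r.getDescription()`:

```java
// Keep only the longest MAX_ABSTRACTS non-blank descriptions.
List<Field<String>> kept = descriptions
	.stream()
	.filter(Objects::nonNull)
	.filter(sp -> StringUtils.isNotBlank(sp.getValue()))
	.map(GraphCleaningFunctions::cleanValue)
	.sorted((s1, s2) -> s2.getValue().length() - s1.getValue().length()) // longest first
	.limit(ModelHardLimits.MAX_ABSTRACTS)
	.collect(Collectors.toList());
```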
`@@ -0,0 +1,79 @@` — new file, class `eu.dnetlib.dhp.schema.oaf.utils.MergeComparator`:

```java
package eu.dnetlib.dhp.schema.oaf.utils;

//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by FernFlower decompiler)
//

import eu.dnetlib.dhp.schema.common.EntityType;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Result;

import java.util.Comparator;
import java.util.HashSet;
import java.util.Optional;
import java.util.stream.Collectors;

public class MergeComparator implements Comparator<Oaf> {
	public MergeComparator() {
	}

	public int compare(Oaf left, Oaf right) {
		// nulls at the end
		if (left == null && right == null) {
			return 0;
		} else if (left == null) {
			return -1;
		} else if (right == null) {
			return 1;
		}

		// invisible
		if (left.getDataInfo() != null && left.getDataInfo().getInvisible() == true) {
			if (right.getDataInfo() != null && right.getDataInfo().getInvisible() == false) {
				return -1;
			}
		}

		// collectedfrom
		HashSet<String> lCf = getCollectedFromIds(left);
		HashSet<String> rCf = getCollectedFromIds(right);
		if (lCf.contains("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2") && !rCf.contains("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2")) {
			return -1;
		} else if (!lCf.contains("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2") && rCf.contains("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2")) {
			return 1;
		}

		SubEntityType lClass = SubEntityType.fromClass(left.getClass());
		SubEntityType rClass = SubEntityType.fromClass(right.getClass());
		return lClass.ordinal() - rClass.ordinal();
	}

	protected HashSet<String> getCollectedFromIds(Oaf left) {
		return (HashSet) Optional.ofNullable(left.getCollectedfrom()).map((cf) -> {
			return (HashSet) cf.stream().map(KeyValue::getKey).collect(Collectors.toCollection(HashSet::new));
		}).orElse(new HashSet());
	}

	enum SubEntityType {
		publication, dataset, software, otherresearchproduct, datasource, organization, project;

		/**
		 * Resolves the EntityType, given the relative class name
		 *
		 * @param clazz the given class name
		 * @param <T> actual OafEntity subclass
		 * @return the EntityType associated to the given class
		 */
		public static <T extends Oaf> SubEntityType fromClass(Class<T> clazz) {
			return valueOf(clazz.getSimpleName().toLowerCase());
		}
	}

}
```
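`MergeComparator` encodes a preference order between two merge candidates: null payloads sort apart, an invisible record yields to a visible one, a record collected from datasource `10|openaire____::081b82f96300b6a6e3d282bad31cb6e2` (hard-coded; presumably the Crossref datasource id) wins over one that is not, and ties fall back to the `SubEntityType` declaration order (publication first, project last). A usage sketch with hypothetical candidates:

```java
// Order two candidates by the preference rules above; leftRecord/rightRecord are invented.
List<Oaf> candidates = new ArrayList<>(Arrays.asList(leftRecord, rightRecord));
candidates.sort(new MergeComparator());
Oaf preferred = candidates.get(0);
```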
`@@ -0,0 +1,707 @@` — new file, class `eu.dnetlib.dhp.schema.oaf.utils.MergeUtils` (inline comments translated from Italian):

```java
package eu.dnetlib.dhp.schema.oaf.utils;

import eu.dnetlib.dhp.schema.common.AccessRightComparator;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static com.google.common.base.Objects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;

public class MergeUtils {

	public static <T extends Oaf> T checkedMerge(final T left, final T right) {
		return (T) merge(left, right, false);
	}

	public static Oaf merge(final Oaf left, final Oaf right) {
		return merge(left, right, false);
	}

	public static Oaf merge(final Oaf left, final Oaf right, boolean checkDelegatedAuthority) {
		if (sameClass(left, right, OafEntity.class)) {
			return mergeEntities(left, right, checkDelegatedAuthority);
		} else if (sameClass(left, right, Relation.class)) {
			return mergeRelation((Relation) left, (Relation) right);
		} else {
			throw new RuntimeException(
				String
					.format(
						"MERGE_FROM_AND_GET incompatible types: %s, %s",
						left.getClass().getCanonicalName(), right.getClass().getCanonicalName()));
		}
	}

	private static <T extends Oaf> boolean sameClass(Object left, Object right, Class<T> cls) {
		return cls.isAssignableFrom(left.getClass()) && cls.isAssignableFrom(right.getClass());
	}

	private static Oaf mergeEntities(Oaf left, Oaf right, boolean checkDelegatedAuthority) {

		if (sameClass(left, right, Result.class)) {
			if (!left.getClass().equals(right.getClass()) || checkDelegatedAuthority) {
				return mergeResultsOfDifferentTypes((Result) left, (Result) right);
			}

			if (sameClass(left, right, Publication.class)) {
				return mergePublication((Publication) left, (Publication) right);
			}
			if (sameClass(left, right, Dataset.class)) {
				return mergeDataset((Dataset) left, (Dataset) right);
			}
			if (sameClass(left, right, OtherResearchProduct.class)) {
				return mergeORP((OtherResearchProduct) left, (OtherResearchProduct) right);
			}
			if (sameClass(left, right, Software.class)) {
				return mergeSoftware((Software) left, (Software) right);
			}

			return mergeResult((Result) left, (Result) right);
		} else if (sameClass(left, right, Datasource.class)) {
			// TODO
			final int trust = compareTrust(left, right);
			return mergeOafEntityFields((Datasource) left, (Datasource) right, trust);
		} else if (sameClass(left, right, Organization.class)) {
			return mergeOrganization((Organization) left, (Organization) right);
		} else if (sameClass(left, right, Project.class)) {
			return mergeProject((Project) left, (Project) right);
		} else {
			throw new RuntimeException(
				String
					.format(
						"MERGE_FROM_AND_GET incompatible types: %s, %s",
						left.getClass().getCanonicalName(), right.getClass().getCanonicalName()));
		}
	}

	/**
	 * This method is used in the global result grouping phase. It checks if one of the two is from a delegated authority
	 * https://graph.openaire.eu/docs/data-model/pids-and-identifiers#delegated-authorities and in that case it prefers
	 * such version.
	 * <p>
	 * Otherwise, it considers a resulttype priority order implemented in {@link ResultTypeComparator}
	 * and proceeds with the canonical property merging.
	 *
	 * @param left
	 * @param right
	 * @return
	 */
	private static <T extends Result> T mergeResultsOfDifferentTypes(T left, T right) {

		final boolean leftFromDelegatedAuthority = isFromDelegatedAuthority(left);
		final boolean rightFromDelegatedAuthority = isFromDelegatedAuthority(right);

		if (leftFromDelegatedAuthority && !rightFromDelegatedAuthority) {
			return left;
		}
		if (!leftFromDelegatedAuthority && rightFromDelegatedAuthority) {
			return right;
		}
		// TODO: raise trust to have preferred fields from one or the other??
		if (new ResultTypeComparator().compare(left, right) < 0) {
			return mergeResult(left, right);
		} else {
			return mergeResult(right, left);
		}
	}

	private static DataInfo chooseDataInfo(DataInfo left, DataInfo right, int trust) {
		if (trust > 0) {
			return left;
		} else if (trust == 0) {
			if (left == null || (left.getInvisible() != null && left.getInvisible().equals(Boolean.TRUE))) {
				return right;
			} else {
				return left;
			}
		} else {
			return right;
		}
	}

	private static String chooseString(String left, String right, int trust) {
		if (trust > 0) {
			return left;
		} else if (trust == 0) {
			return StringUtils.isNotBlank(left) ? left : right;
		} else {
			return right;
		}
	}

	private static <T> T chooseReference(T left, T right, int trust) {
		if (trust > 0) {
			return left;
		} else if (trust == 0) {
			return left != null ? left : right;
		} else {
			return right;
		}
	}

	private static Long max(Long left, Long right) {
		if (left == null)
			return right;
		if (right == null)
			return left;

		return Math.max(left, right);
	}

	// trust ??
	private static Boolean booleanOR(Boolean a, Boolean b) {
		if (a == null) {
			return b;
		} else if (b == null) {
			return a;
		}

		return a || b;
	}

	private static <T> List<T> unionDistinctLists(final List<T> left, final List<T> right, int trust) {
		if (left == null) {
			return right;
		} else if (right == null) {
			return left;
		}

		List<T> h = trust >= 0 ? left : right;
		List<T> l = trust >= 0 ? right : left;

		return Stream.concat(h.stream(), l.stream())
			.filter(Objects::nonNull)
			.distinct()
			.collect(Collectors.toList());
	}

	private static List<String> unionDistinctListOfString(final List<String> l, final List<String> r) {
		if (l == null) {
			return r;
		} else if (r == null) {
			return l;
		}

		return Stream.concat(l.stream(), r.stream())
			.filter(StringUtils::isNotBlank)
			.distinct()
			.collect(Collectors.toList());
	}

	// TODO review
	private static List<KeyValue> mergeKeyValue(List<KeyValue> left, List<KeyValue> right, int trust) {
		if (trust < 0) {
			List<KeyValue> s = left;
			left = right;
			right = s;
		}

		HashMap<String, KeyValue> values = new HashMap<>();
		left.forEach(kv -> values.put(kv.getKey(), kv));
		right.forEach(kv -> values.putIfAbsent(kv.getKey(), kv));

		return new ArrayList<>(values.values());
	}

	private static List<StructuredProperty> unionTitle(List<StructuredProperty> left, List<StructuredProperty> right, int trust) {
		if (left == null) {
			return right;
		} else if (right == null) {
			return left;
		}

		List<StructuredProperty> h = trust >= 0 ? left : right;
		List<StructuredProperty> l = trust >= 0 ? right : left;

		return Stream.concat(h.stream(), l.stream())
			.filter(Objects::isNull)
			.distinct()
			.collect(Collectors.toList());
	}

	/**
	 * Internal utility that merges the common OafEntity fields
	 *
	 * @param merged
	 * @param enrich
	 * @param <T>
	 * @return
	 */
	private static <T extends Oaf> T mergeOafFields(T merged, T enrich, int trust) {

		// TODO: union of all values, but what does it mean with KeyValue pairs???
		merged.setCollectedfrom(mergeKeyValue(merged.getCollectedfrom(), enrich.getCollectedfrom(), trust));
		merged.setDataInfo(chooseDataInfo(merged.getDataInfo(), enrich.getDataInfo(), trust));
		merged.setLastupdatetimestamp(max(merged.getLastupdatetimestamp(), enrich.getLastupdatetimestamp()));

		return merged;
	}

	/**
	 * Internal utility that merges the common OafEntity fields
	 *
	 * @param original
	 * @param enrich
	 * @param <T>
	 * @return
	 */
	private static <T extends OafEntity> T mergeOafEntityFields(T original, T enrich, int trust) {
		final T merged = mergeOafFields(original, enrich, trust);

		merged.setOriginalId(unionDistinctListOfString(merged.getOriginalId(), enrich.getOriginalId()));
		merged.setPid(unionDistinctLists(merged.getPid(), enrich.getPid(), trust));
		// dateofcollection: set to today when merging
		merged.setDateofcollection(chooseString(merged.getDateofcollection(), enrich.getDateofcollection(), trust));
		// setDateoftransformation: set empty in dedup, note for Claudio
		merged.setDateoftransformation(chooseString(merged.getDateoftransformation(), enrich.getDateoftransformation(), trust));
		// TODO: was missing in OafEntity.merge
		merged.setExtraInfo(unionDistinctLists(merged.getExtraInfo(), enrich.getExtraInfo(), trust));
		// oaiprovenance: to be set to null when the merge is generated
		merged.setOaiprovenance(chooseReference(merged.getOaiprovenance(), enrich.getOaiprovenance(), trust));
		merged.setMeasures(unionDistinctLists(merged.getMeasures(), enrich.getMeasures(), trust));

		return merged;
	}

	public static <T extends Relation> T mergeRelation(T original, T enrich) {
		int trust = compareTrust(original, enrich);
		T merge = mergeOafFields(original, enrich, trust);

		checkArgument(Objects.equals(merge.getSource(), enrich.getSource()), "source ids must be equal");
		checkArgument(Objects.equals(merge.getTarget(), enrich.getTarget()), "target ids must be equal");
		checkArgument(Objects.equals(merge.getRelType(), enrich.getRelType()), "relType(s) must be equal");
		checkArgument(
			Objects.equals(merge.getSubRelType(), enrich.getSubRelType()), "subRelType(s) must be equal");
		checkArgument(Objects.equals(merge.getRelClass(), enrich.getRelClass()), "relClass(es) must be equal");

		// merge.setProvenance(mergeLists(merge.getProvenance(), enrich.getProvenance()));

		// TODO: trust ??
		merge.setValidated(booleanOR(merge.getValidated(), enrich.getValidated()));
		try {
			merge.setValidationDate(ModelSupport.oldest(merge.getValidationDate(), enrich.getValidationDate()));
		} catch (ParseException e) {
			throw new IllegalArgumentException(String
				.format(
					"invalid validation date format in relation [s:%s, t:%s]: %s", merge.getSource(),
					merge.getTarget(),
					merge.getValidationDate()));
		}

		// TODO keyvalue merge
		merge.setProperties(mergeKeyValue(merge.getProperties(), enrich.getProperties(), trust));

		return merge;
	}

	public static <T extends Result> T mergeResult(T original, T enrich) {
		final int trust = compareTrust(original, enrich);
		T merge = mergeOafEntityFields(original, enrich, trust);

		if (merge.getProcessingchargeamount() == null || StringUtils.isBlank(merge.getProcessingchargeamount().getValue())) {
			merge.setProcessingchargeamount(enrich.getProcessingchargeamount());
			merge.setProcessingchargecurrency(enrich.getProcessingchargecurrency());
		}

		// author = use the same logic as in dedup
		merge.setAuthor(chooseReference(merge.getAuthor(), enrich.getAuthor(), trust));
		// the first that arrives according to the priority ordering
		merge.setResulttype(chooseReference(merge.getResulttype(), enrich.getResulttype(), trust));
		// handled like resulttype because it is a subtype
		merge.setMetaResourceType(chooseReference(merge.getMetaResourceType(), enrich.getMetaResourceType(), trust));
		// to be moved into the instance; here take the first that arrives
		merge.setLanguage(chooseReference(merge.getLanguage(), enrich.getLanguage(), trust));
		// country: leave it like this -> aside on the datainfo
		merge.setCountry(unionDistinctLists(merge.getCountry(), enrich.getCountry(), trust));
		// ok
		merge.setSubject(unionDistinctLists(merge.getSubject(), enrich.getSubject(), trust));
		// union by priority, so they are appended
		merge.setTitle(unionTitle(merge.getTitle(), enrich.getTitle(), trust));
		// ok
		merge.setRelevantdate(unionDistinctLists(merge.getRelevantdate(), enrich.getRelevantdate(), trust));
		// first trust, then longest list
		merge.setDescription(longestLists(merge.getDescription(), enrich.getDescription()));
		// higher trust first, then the older one
		merge.setDateofacceptance(chooseReference(merge.getDateofacceptance(), enrich.getDateofacceptance(), trust));
		// ok, but publisher should be made repeatable
		merge.setPublisher(chooseReference(merge.getPublisher(), enrich.getPublisher(), trust));
		// ok
		merge.setEmbargoenddate(chooseReference(merge.getEmbargoenddate(), enrich.getEmbargoenddate(), trust));
		// ok
		merge.setSource(unionDistinctLists(merge.getSource(), enrich.getSource(), trust));
		// ok
		merge.setFulltext(unionDistinctLists(merge.getFulltext(), enrich.getFulltext(), trust));
		// ok
		merge.setFormat(unionDistinctLists(merge.getFormat(), enrich.getFormat(), trust));
		// ok
		merge.setContributor(unionDistinctLists(merge.getContributor(), enrich.getContributor(), trust));

		// first take the higher trust; on that, take the best value from the instances TODO
		// higher trust, but with equal trust the more specific one (base of the vocabulary)
		// see notes
		merge.setResourcetype(firstNonNull(merge.getResourcetype(), enrich.getResourcetype()));

		// ok
		merge.setCoverage(unionDistinctLists(merge.getCoverage(), enrich.getCoverage(), trust));

		// most open ok
		if (enrich.getBestaccessright() != null
			&& new AccessRightComparator<>()
				.compare(enrich.getBestaccessright(), merge.getBestaccessright()) < 0) {
			merge.setBestaccessright(enrich.getBestaccessright());
		}

		// TODO merge of datainfo given same id
		merge.setContext(unionDistinctLists(merge.getContext(), enrich.getContext(), trust));

		// ok
		merge.setExternalReference(unionDistinctLists(merge.getExternalReference(), enrich.getExternalReference(), trust));

		// instance enrichment or union
		// review instance equals => add pid to comparision
		if (!isAnEnrichment(merge) && !isAnEnrichment(enrich))
			merge.setInstance(unionDistinctLists(merge.getInstance(), enrich.getInstance(), trust));
		else {
			final List<Instance> enrichmentInstances = isAnEnrichment(merge) ? merge.getInstance()
				: enrich.getInstance();
			final List<Instance> enrichedInstances = isAnEnrichment(merge) ? enrich.getInstance()
				: merge.getInstance();
			if (isAnEnrichment(merge))
				merge.setDataInfo(enrich.getDataInfo());
			merge.setInstance(enrichInstances(enrichedInstances, enrichmentInstances));
		}

		merge.setEoscifguidelines(unionDistinctLists(merge.getEoscifguidelines(), enrich.getEoscifguidelines(), trust));
		merge.setIsGreen(booleanOR(merge.getIsGreen(), enrich.getIsGreen()));
		// OK but should be list of values
		merge.setOpenAccessColor(chooseReference(merge.getOpenAccessColor(), enrich.getOpenAccessColor(), trust));
		merge.setIsInDiamondJournal(booleanOR(merge.getIsInDiamondJournal(), enrich.getIsInDiamondJournal()));
		merge.setPubliclyFunded(booleanOR(merge.getPubliclyFunded(), enrich.getPubliclyFunded()));

		return merge;
	}

	private static <T extends OtherResearchProduct> T mergeORP(T original, T enrich) {
		int trust = compareTrust(original, enrich);
		final T merge = mergeResult(original, enrich);

		merge.setContactperson(unionDistinctLists(merge.getContactperson(), enrich.getContactperson(), trust));
		merge.setContactgroup(unionDistinctLists(merge.getContactgroup(), enrich.getContactgroup(), trust));
		merge.setTool(unionDistinctLists(merge.getTool(), enrich.getTool(), trust));

		return merge;
	}

	private static <T extends Software> T mergeSoftware(T original, T enrich) {
		int trust = compareTrust(original, enrich);
		final T merge = mergeResult(original, enrich);

		merge.setDocumentationUrl(unionDistinctLists(merge.getDocumentationUrl(), enrich.getDocumentationUrl(), trust));
		merge.setLicense(unionDistinctLists(merge.getLicense(), enrich.getLicense(), trust));
		merge.setCodeRepositoryUrl(chooseReference(merge.getCodeRepositoryUrl(), enrich.getCodeRepositoryUrl(), trust));
		merge.setProgrammingLanguage(chooseReference(merge.getProgrammingLanguage(), enrich.getProgrammingLanguage(), trust));

		return merge;
	}

	private static <T extends Dataset> T mergeDataset(T original, T enrich) {
		int trust = compareTrust(original, enrich);
		T merge = mergeResult(original, enrich);

		merge.setStoragedate(chooseReference(merge.getStoragedate(), enrich.getStoragedate(), trust));
		merge.setDevice(chooseReference(merge.getDevice(), enrich.getDevice(), trust));
		merge.setSize(chooseReference(merge.getSize(), enrich.getSize(), trust));
		merge.setVersion(chooseReference(merge.getVersion(), enrich.getVersion(), trust));
		merge.setLastmetadataupdate(chooseReference(merge.getLastmetadataupdate(), enrich.getLastmetadataupdate(), trust));
		merge.setMetadataversionnumber(chooseReference(merge.getMetadataversionnumber(), enrich.getMetadataversionnumber(), trust));
		merge.setGeolocation(unionDistinctLists(merge.getGeolocation(), enrich.getGeolocation(), trust));

		return merge;
	}

	public static <T extends Publication> T mergePublication(T original, T enrich) {
		final int trust = compareTrust(original, enrich);
		T merged = mergeResult(original, enrich);

		merged.setJournal(chooseReference(merged.getJournal(), enrich.getJournal(), trust));

		return merged;
	}

	private static <T extends Organization> T mergeOrganization(T left, T enrich) {
		int trust = compareTrust(left, enrich);
		T merged = mergeOafEntityFields(left, enrich, trust);

		merged.setLegalshortname(chooseReference(merged.getLegalshortname(), enrich.getLegalshortname(), trust));
		merged.setLegalname(chooseReference(merged.getLegalname(), enrich.getLegalname(), trust));
		merged.setAlternativeNames(unionDistinctLists(enrich.getAlternativeNames(), merged.getAlternativeNames(), trust));
		merged.setWebsiteurl(chooseReference(merged.getWebsiteurl(), enrich.getWebsiteurl(), trust));
		merged.setLogourl(chooseReference(merged.getLogourl(), enrich.getLogourl(), trust));
		merged.setEclegalbody(chooseReference(merged.getEclegalbody(), enrich.getEclegalbody(), trust));
		merged.setEclegalperson(chooseReference(merged.getEclegalperson(), enrich.getEclegalperson(), trust));
		merged.setEcnonprofit(chooseReference(merged.getEcnonprofit(), enrich.getEcnonprofit(), trust));
		merged.setEcresearchorganization(chooseReference(merged.getEcresearchorganization(), enrich.getEcresearchorganization(), trust));
		merged.setEchighereducation(chooseReference(merged.getEchighereducation(), enrich.getEchighereducation(), trust));
		merged.setEcinternationalorganizationeurinterests(chooseReference(merged.getEcinternationalorganizationeurinterests(), enrich.getEcinternationalorganizationeurinterests(), trust));
		merged.setEcinternationalorganization(chooseReference(merged.getEcinternationalorganization(), enrich.getEcinternationalorganization(), trust));
		merged.setEcenterprise(chooseReference(merged.getEcenterprise(), enrich.getEcenterprise(), trust));
		merged.setEcsmevalidated(chooseReference(merged.getEcsmevalidated(), enrich.getEcsmevalidated(), trust));
		merged.setEcnutscode(chooseReference(merged.getEcnutscode(), enrich.getEcnutscode(), trust));
		merged.setCountry(chooseReference(merged.getCountry(), enrich.getCountry(), trust));

		return merged;
	}

	public static <T extends Project> T mergeProject(T original, T enrich) {
		int trust = compareTrust(original, enrich);
		T merged = mergeOafEntityFields(original, enrich, trust);

		merged.setWebsiteurl(chooseReference(merged.getWebsiteurl(), enrich.getWebsiteurl(), trust));
		merged.setCode(chooseReference(merged.getCode(), enrich.getCode(), trust));
		merged.setAcronym(chooseReference(merged.getAcronym(), enrich.getAcronym(), trust));
		merged.setTitle(chooseReference(merged.getTitle(), enrich.getTitle(), trust));
		merged.setStartdate(chooseReference(merged.getStartdate(), enrich.getStartdate(), trust));
		merged.setEnddate(chooseReference(merged.getEnddate(), enrich.getEnddate(), trust));
		merged.setCallidentifier(chooseReference(merged.getCallidentifier(), enrich.getCallidentifier(), trust));
		merged.setKeywords(chooseReference(merged.getKeywords(), enrich.getKeywords(), trust));
		merged.setDuration(chooseReference(merged.getDuration(), enrich.getDuration(), trust));
		merged.setEcsc39(chooseReference(merged.getEcsc39(), enrich.getEcsc39(), trust));
		merged.setOamandatepublications(chooseReference(merged.getOamandatepublications(), enrich.getOamandatepublications(), trust));
		merged.setEcarticle29_3(chooseReference(merged.getEcarticle29_3(), enrich.getEcarticle29_3(), trust));
		merged.setSubjects(unionDistinctLists(merged.getSubjects(), enrich.getSubjects(), trust));
		merged.setFundingtree(unionDistinctLists(merged.getFundingtree(), enrich.getFundingtree(), trust));
		merged.setContracttype(chooseReference(merged.getContracttype(), enrich.getContracttype(), trust));
		merged.setOptional1(chooseReference(merged.getOptional1(), enrich.getOptional1(), trust));
		merged.setOptional2(chooseReference(merged.getOptional2(), enrich.getOptional2(), trust));
		merged.setJsonextrainfo(chooseReference(merged.getJsonextrainfo(), enrich.getJsonextrainfo(), trust));
		merged.setContactfullname(chooseReference(merged.getContactfullname(), enrich.getContactfullname(), trust));
		merged.setContactfax(chooseReference(merged.getContactfax(), enrich.getContactfax(), trust));
		merged.setContactphone(chooseReference(merged.getContactphone(), enrich.getContactphone(), trust));
		merged.setContactemail(chooseReference(merged.getContactemail(), enrich.getContactemail(), trust));
		merged.setSummary(chooseReference(merged.getSummary(), enrich.getSummary(), trust));
		merged.setCurrency(chooseReference(merged.getCurrency(), enrich.getCurrency(), trust));

		// missing in Project.merge
		merged.setTotalcost(chooseReference(merged.getTotalcost(), enrich.getTotalcost(), trust));
		merged.setFundedamount(chooseReference(merged.getFundedamount(), enrich.getFundedamount(), trust));

		// trust ??
		if (enrich.getH2020topiccode() != null && StringUtils.isEmpty(merged.getH2020topiccode())) {
			merged.setH2020topiccode(enrich.getH2020topiccode());
			merged.setH2020topicdescription(enrich.getH2020topicdescription());
		}

		merged.setH2020classification(unionDistinctLists(merged.getH2020classification(), enrich.getH2020classification(), trust));

		return merged;
	}

	/**
	 * Longest lists list.
	 *
	 * @param a the a
	 * @param b the b
	 * @return the list
	 */
	public static List<Field<String>> longestLists(List<Field<String>> a, List<Field<String>> b) {
		if (a == null || b == null)
			return a == null ? b : a;

		return a.size() >= b.size() ? a : b;
	}

	/**
	 * This main method apply the enrichment of the instances
	 *
	 * @param toEnrichInstances the instances that could be enriched
	 * @param enrichmentInstances the enrichment instances
	 * @return list of instances possibly enriched
	 */
	private static List<Instance> enrichInstances(final List<Instance> toEnrichInstances,
		final List<Instance> enrichmentInstances) {
		final List<Instance> enrichmentResult = new ArrayList<>();

		if (toEnrichInstances == null) {
			return enrichmentResult;
		}
		if (enrichmentInstances == null) {
			return enrichmentResult;
		}
		Map<String, Instance> ri = toInstanceMap(enrichmentInstances);

		toEnrichInstances.forEach(i -> {
			final List<Instance> e = findEnrichmentsByPID(i.getPid(), ri);
			if (e != null && e.size() > 0) {
				e.forEach(enr -> applyEnrichment(i, enr));
			} else {
				final List<Instance> a = findEnrichmentsByPID(i.getAlternateIdentifier(), ri);
				if (a != null && a.size() > 0) {
					a.forEach(enr -> applyEnrichment(i, enr));
				}
			}
			enrichmentResult.add(i);
		});
		return enrichmentResult;
	}

	/**
	 * This method converts the list of instance enrichments
	 * into a Map where the key is the normalized identifier
	 * and the value is the instance itself
	 *
	 * @param ri the list of enrichment instances
	 * @return the result map
	 */
	private static Map<String, Instance> toInstanceMap(final List<Instance> ri) {
		return ri
			.stream()
			.filter(i -> i.getPid() != null || i.getAlternateIdentifier() != null)
			.flatMap(i -> {
				final List<Pair<String, Instance>> result = new ArrayList<>();
				if (i.getPid() != null)
					i
						.getPid()
						.stream()
						.filter(MergeUtils::validPid)
						.forEach(p -> result.add(new ImmutablePair<>(extractKeyFromPid(p), i)));
				if (i.getAlternateIdentifier() != null)
					i
						.getAlternateIdentifier()
						.stream()
						.filter(MergeUtils::validPid)
						.forEach(p -> result.add(new ImmutablePair<>(extractKeyFromPid(p), i)));
				return result.stream();
			})
			.collect(
				Collectors
					.toMap(
						Pair::getLeft,
						Pair::getRight,
						(a, b) -> a));
	}

	private static boolean isFromDelegatedAuthority(Result r) {
		return Optional
			.ofNullable(r.getInstance())
			.map(
				instance -> instance
					.stream()
					.filter(i -> Objects.nonNull(i.getCollectedfrom()))
					.map(i -> i.getCollectedfrom().getKey())
					.anyMatch(cfId -> IdentifierFactory.delegatedAuthorityDatasourceIds().contains(cfId)))
			.orElse(false);
	}

	/**
	 * Valid pid boolean.
	 *
	 * @param p the p
	 * @return the boolean
	 */
	private static boolean validPid(final StructuredProperty p) {
		return p.getValue() != null && p.getQualifier() != null && p.getQualifier().getClassid() != null;
	}

	/**
	 * Normalize pid string.
	 *
	 * @param pid the pid
	 * @return the string
	 */
	private static String extractKeyFromPid(final StructuredProperty pid) {
		if (pid == null)
			return null;
		final StructuredProperty normalizedPid = CleaningFunctions.normalizePidValue(pid);

		return String.format("%s::%s", normalizedPid.getQualifier().getClassid(), normalizedPid.getValue());
	}

	/**
	 * This utility method finds the list of enrichment instances
	 * that match one or more PIDs in the input list
	 *
	 * @param pids the list of PIDs
	 * @param enrichments the List of enrichment instances having the same pid
	 * @return the list
	 */
	private static List<Instance> findEnrichmentsByPID(final List<StructuredProperty> pids,
		final Map<String, Instance> enrichments) {
		if (pids == null || enrichments == null)
			return null;
		return pids
			.stream()
			.map(MergeUtils::extractKeyFromPid)
			.map(enrichments::get)
			.filter(Objects::nonNull)
			.collect(Collectors.toList());
	}

	/**
	 * Is an enrichment boolean.
	 *
	 * @param e the e
	 * @return the boolean
	 */
	private static boolean isAnEnrichment(OafEntity e) {
		return e.getDataInfo() != null &&
			e.getDataInfo().getProvenanceaction() != null
			&& ModelConstants.PROVENANCE_ENRICH.equalsIgnoreCase(e.getDataInfo().getProvenanceaction().getClassid());
	}

	/**
	 * This method apply enrichment on a single instance
	 * The enrichment consists of replacing values on
	 * single attribute only if in the current instance is missing
	 * The only repeatable field enriched is measures
	 *
	 * @param merge the current instance
	 * @param enrichment the enrichment instance
	 */
	private static void applyEnrichment(final Instance merge, final Instance enrichment) {
		if (merge == null || enrichment == null)
			return;

		merge.setLicense(firstNonNull(merge.getLicense(), enrichment.getLicense()));
		merge.setAccessright(firstNonNull(merge.getAccessright(), enrichment.getAccessright()));
		merge.setInstancetype(firstNonNull(merge.getInstancetype(), enrichment.getInstancetype()));
		merge.setInstanceTypeMapping(firstNonNull(merge.getInstanceTypeMapping(), enrichment.getInstanceTypeMapping()));
		merge.setHostedby(firstNonNull(merge.getHostedby(), enrichment.getHostedby()));
		merge.setUrl(unionDistinctLists(merge.getUrl(), enrichment.getUrl(), 0));
		merge.setDistributionlocation(firstNonNull(merge.getDistributionlocation(), enrichment.getDistributionlocation()));
		merge.setCollectedfrom(firstNonNull(merge.getCollectedfrom(), enrichment.getCollectedfrom()));
		// pid and alternateId are used for matching
		merge.setDateofacceptance(firstNonNull(merge.getDateofacceptance(), enrichment.getDateofacceptance()));
		merge.setProcessingchargeamount(firstNonNull(merge.getProcessingchargeamount(), enrichment.getProcessingchargeamount()));
		merge.setProcessingchargecurrency(firstNonNull(merge.getProcessingchargecurrency(), enrichment.getProcessingchargecurrency()));
		merge.setRefereed(firstNonNull(merge.getRefereed(), enrichment.getRefereed()));
		merge.setMeasures(unionDistinctLists(merge.getMeasures(), enrichment.getMeasures(), 0));
		merge.setFulltext(firstNonNull(merge.getFulltext(), enrichment.getFulltext()));
	}

	private static int compareTrust(Oaf a, Oaf b) {
		String left = Optional
			.ofNullable(a.getDataInfo())
			.map(DataInfo::getTrust)
			.orElse("0.0");

		String right = Optional
			.ofNullable(b.getDataInfo())
			.map(DataInfo::getTrust)
			.orElse("0.0");

		return left.compareTo(right);
	}

}
```
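Every field-level decision in the new `MergeUtils` hangs off a single `compareTrust` result: positive keeps the left value, negative the right, and zero falls back to "first non-null / non-blank wins" (`chooseReference`, `chooseString`) or to a union that lists the more trusted side first (`unionDistinctLists`). A sketch of the two public entry points, with hypothetical records:

```java
// Typed merge of two records of the same kind; incompatible classes throw.
Publication mergedPub = MergeUtils.checkedMerge(pubA, pubB);

// Grouping-phase merge across result subtypes: with the flag set, a record from
// a delegated authority wins outright; otherwise ResultTypeComparator decides
// which side survives before the field-by-field merge.
Result grouped = (Result) MergeUtils.merge(publication, dataset, true);
```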
```diff
@@ -14,7 +14,6 @@ import java.util.stream.Collectors;
 import org.apache.commons.lang3.StringUtils;
 
 import eu.dnetlib.dhp.schema.common.AccessRightComparator;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 
 public class OafMapperUtils {
@@ -22,65 +21,6 @@ public class OafMapperUtils {
 	private OafMapperUtils() {
 	}
 
-	public static Oaf merge(final Oaf left, final Oaf right) {
-		if (ModelSupport.isSubClass(left, OafEntity.class)) {
-			return mergeEntities((OafEntity) left, (OafEntity) right);
-		} else if (ModelSupport.isSubClass(left, Relation.class)) {
-			((Relation) left).mergeFrom((Relation) right);
-		} else {
-			throw new IllegalArgumentException("invalid Oaf type:" + left.getClass().getCanonicalName());
-		}
-		return left;
-	}
-
-	public static OafEntity mergeEntities(OafEntity left, OafEntity right) {
-		if (ModelSupport.isSubClass(left, Result.class)) {
-			return mergeResults((Result) left, (Result) right);
-		} else if (ModelSupport.isSubClass(left, Datasource.class)) {
-			left.mergeFrom(right);
-		} else if (ModelSupport.isSubClass(left, Organization.class)) {
-			left.mergeFrom(right);
-		} else if (ModelSupport.isSubClass(left, Project.class)) {
-			left.mergeFrom(right);
-		} else {
-			throw new IllegalArgumentException("invalid OafEntity subtype:" + left.getClass().getCanonicalName());
-		}
-		return left;
-	}
-
-	public static Result mergeResults(Result left, Result right) {
-
-		final boolean leftFromDelegatedAuthority = isFromDelegatedAuthority(left);
-		final boolean rightFromDelegatedAuthority = isFromDelegatedAuthority(right);
-
-		if (leftFromDelegatedAuthority && !rightFromDelegatedAuthority) {
-			return left;
-		}
-		if (!leftFromDelegatedAuthority && rightFromDelegatedAuthority) {
-			return right;
-		}
-
-		if (new ResultTypeComparator().compare(left, right) < 0) {
-			left.mergeFrom(right);
-			return left;
-		} else {
-			right.mergeFrom(left);
-			return right;
-		}
-	}
-
-	private static boolean isFromDelegatedAuthority(Result r) {
-		return Optional
-			.ofNullable(r.getInstance())
-			.map(
-				instance -> instance
-					.stream()
-					.filter(i -> Objects.nonNull(i.getCollectedfrom()))
-					.map(i -> i.getCollectedfrom().getKey())
-					.anyMatch(cfId -> IdentifierFactory.delegatedAuthorityDatasourceIds().contains(cfId)))
-			.orElse(false);
-	}
-
 	public static KeyValue keyValue(final String k, final String v) {
 		final KeyValue kv = new KeyValue();
 		kv.setKey(k);
```
`@@ -0,0 +1,111 @@` — new file, class `eu.dnetlib.dhp.schema.oaf.utils.MergeUtilsTest`:

```java
package eu.dnetlib.dhp.schema.oaf.utils;

import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.schema.common.ModelSupport;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Result;

public class MergeUtilsTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

	@Test
	void testMergePubs() throws IOException {
		Publication p1 = read("publication_1.json", Publication.class);
		Publication p2 = read("publication_2.json", Publication.class);
		Dataset d1 = read("dataset_1.json", Dataset.class);
		Dataset d2 = read("dataset_2.json", Dataset.class);

		assertEquals(1, p1.getCollectedfrom().size());
		assertEquals(ModelConstants.CROSSREF_ID, p1.getCollectedfrom().get(0).getKey());
		assertEquals(1, d2.getCollectedfrom().size());
		assertFalse(cfId(d2.getCollectedfrom()).contains(ModelConstants.CROSSREF_ID));

		assertEquals(1, p2.getCollectedfrom().size());
		assertFalse(cfId(p2.getCollectedfrom()).contains(ModelConstants.CROSSREF_ID));
		assertEquals(1, d1.getCollectedfrom().size());
		assertTrue(cfId(d1.getCollectedfrom()).contains(ModelConstants.CROSSREF_ID));

		final Result p1d2 = MergeUtils.checkedMerge(p1, d2);
		assertEquals(ModelConstants.PUBLICATION_RESULTTYPE_CLASSID, p1d2.getResulttype().getClassid());
		assertTrue(p1d2 instanceof Publication);
		assertEquals(p1.getId(), p1d2.getId());
	}

	@Test
	void testMergePubs_1() throws IOException {
		Publication p2 = read("publication_2.json", Publication.class);
		Dataset d1 = read("dataset_1.json", Dataset.class);

		final Result p2d1 = MergeUtils.checkedMerge(p2, d1);
		assertEquals((ModelConstants.DATASET_RESULTTYPE_CLASSID), p2d1.getResulttype().getClassid());
		assertTrue(p2d1 instanceof Dataset);
		assertEquals(d1.getId(), p2d1.getId());
		assertEquals(2, p2d1.getCollectedfrom().size());
	}

	@Test
	void testMergePubs_2() throws IOException {
		Publication p1 = read("publication_1.json", Publication.class);
		Publication p2 = read("publication_2.json", Publication.class);

		Result p1p2 = MergeUtils.checkedMerge(p1, p2);
		assertTrue(p1p2 instanceof Publication);
		assertEquals(p1.getId(), p1p2.getId());
		assertEquals(2, p1p2.getCollectedfrom().size());
	}

	@Test
	void testDelegatedAuthority_1() throws IOException {
		Dataset d1 = read("dataset_2.json", Dataset.class);
		Dataset d2 = read("dataset_delegated.json", Dataset.class);

		assertEquals(1, d2.getCollectedfrom().size());
		assertTrue(cfId(d2.getCollectedfrom()).contains(ModelConstants.ZENODO_OD_ID));

		Result res = (Result) MergeUtils.merge(d1, d2, true);

		assertEquals(d2, res);
	}

	@Test
	void testDelegatedAuthority_2() throws IOException {
		Dataset p1 = read("publication_1.json", Dataset.class);
		Dataset d2 = read("dataset_delegated.json", Dataset.class);

		assertEquals(1, d2.getCollectedfrom().size());
		assertTrue(cfId(d2.getCollectedfrom()).contains(ModelConstants.ZENODO_OD_ID));

		Result res = (Result) MergeUtils.merge(p1, d2, true);

		assertEquals(d2, res);
	}

	protected HashSet<String> cfId(List<KeyValue> collectedfrom) {
		return collectedfrom.stream().map(KeyValue::getKey).collect(Collectors.toCollection(HashSet::new));
	}

	protected <T extends Result> T read(String filename, Class<T> clazz) throws IOException {
		final String json = IOUtils.toString(getClass().getResourceAsStream(filename));
		return OBJECT_MAPPER.readValue(json, clazz);
	}

}
```
@ -149,7 +149,7 @@ class OafMapperUtilsTest {
 	void testDate() {
 		final String date = GraphCleaningFunctions.cleanDate("23-FEB-1998");
 		assertNotNull(date);
-		System.out.println(date);
+		assertEquals("1998-02-23", date);
 	}

 	@Test
@ -166,8 +166,8 @@ class OafMapperUtilsTest {

 		assertEquals(
 			ModelConstants.PUBLICATION_RESULTTYPE_CLASSID,
-			OafMapperUtils
-				.mergeResults(p1, d2)
+			MergeUtils
+				.mergeResult(p1, d2)
 				.getResulttype()
 				.getClassid());
@ -178,8 +178,8 @@ class OafMapperUtilsTest {

 		assertEquals(
 			ModelConstants.DATASET_RESULTTYPE_CLASSID,
-			OafMapperUtils
-				.mergeResults(p2, d1)
+			MergeUtils
+				.mergeResult(p2, d1)
 				.getResulttype()
 				.getClassid());
 	}
@ -192,7 +192,7 @@ class OafMapperUtilsTest {
 		assertEquals(1, d2.getCollectedfrom().size());
 		assertTrue(cfId(d2.getCollectedfrom()).contains(ModelConstants.ZENODO_OD_ID));

-		Result res = OafMapperUtils.mergeResults(d1, d2);
+		Result res = MergeUtils.mergeResult(d1, d2);

 		assertEquals(d2, res);
@ -23,15 +23,18 @@ public class InstanceTypeMatch extends AbstractListComparator {

 		// jolly types
 		translationMap.put("Conference object", "*");
+		translationMap.put("Research", "*");
 		translationMap.put("Other literature type", "*");
 		translationMap.put("Unknown", "*");
 		translationMap.put("UNKNOWN", "*");

 		// article types
 		translationMap.put("Article", "Article");
+		translationMap.put("Journal", "Article");
 		translationMap.put("Data Paper", "Article");
 		translationMap.put("Software Paper", "Article");
 		translationMap.put("Preprint", "Article");
+		translationMap.put("Part of book or chapter of book", "Article");

 		// thesis types
 		translationMap.put("Thesis", "Thesis");
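The new entries extend the normalization applied before instance types are compared: values mapped to "*" ("Research", "Unknown", ...) act as jolly types, while "Journal", "Preprint" and now "Part of book or chapter of book" all collapse onto "Article". A small Java sketch of how such a map makes two raw types comparable, assuming the comparator treats "*" as a wildcard as the "jolly types" comment suggests (illustrative only, not the actual comparator code):

import java.util.HashMap;
import java.util.Map;

class InstanceTypeTranslationSketch {
	static boolean compatible(String a, String b, Map<String, String> translationMap) {
		String ta = translationMap.getOrDefault(a, a);
		String tb = translationMap.getOrDefault(b, b);
		// a jolly "*" matches any type; otherwise the translated types must coincide
		return "*".equals(ta) || "*".equals(tb) || ta.equals(tb);
	}

	public static void main(String[] args) {
		Map<String, String> map = new HashMap<>();
		map.put("Research", "*");
		map.put("Journal", "Article");
		map.put("Preprint", "Article");
		System.out.println(compatible("Research", "Thesis", map)); // true: jolly type
		System.out.println(compatible("Journal", "Preprint", map)); // true: both translate to Article
	}
}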
@ -1,14 +1,13 @@

 package eu.dnetlib.dhp.actionmanager.promote;

-import static eu.dnetlib.dhp.schema.common.ModelSupport.isSubClass;
+import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.SerializableSupplier;
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;

 import java.util.function.BiFunction;

-import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.SerializableSupplier;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
-import eu.dnetlib.dhp.schema.oaf.Relation;
+import static eu.dnetlib.dhp.schema.common.ModelSupport.isSubClass;

 /** OAF model merging support. */
 public class MergeAndGet {
@ -46,20 +45,7 @@ public class MergeAndGet {
 	}

 	private static <G extends Oaf, A extends Oaf> G mergeFromAndGet(G x, A y) {
-		if (isSubClass(x, Relation.class) && isSubClass(y, Relation.class)) {
-			((Relation) x).mergeFrom((Relation) y);
-			return x;
-		} else if (isSubClass(x, OafEntity.class)
-			&& isSubClass(y, OafEntity.class)
-			&& isSubClass(x, y)) {
-			((OafEntity) x).mergeFrom((OafEntity) y);
-			return x;
-		}
-		throw new RuntimeException(
-			String
-				.format(
-					"MERGE_FROM_AND_GET incompatible types: %s, %s",
-					x.getClass().getCanonicalName(), y.getClass().getCanonicalName()));
+		return (G) MergeUtils.merge(x, y);
 	}

 	@SuppressWarnings("unchecked")
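With this change the MERGE_FROM_AND_GET strategy no longer dispatches on Relation/OafEntity locally; both the type checking and the merge itself are delegated to MergeUtils.merge. Callers are untouched, since functionFor still hands back the same serializable merge function. A usage sketch with hypothetical inputs, assuming the generic signature of functionFor shown in the surrounding class:

import java.util.function.BiFunction;

import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.SerializableSupplier;
import eu.dnetlib.dhp.schema.oaf.Oaf;

// Sketch: resolving and applying the merge function; graphRecord/actionPayload are hypothetical.
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> merge =
	MergeAndGet.functionFor(MergeAndGet.Strategy.MERGE_FROM_AND_GET);
// Oaf merged = merge.get().apply(graphRecord, actionPayload);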
@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2024.
+ * SPDX-FileCopyrightText: © 2023 Consiglio Nazionale delle Ricerche
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package eu.dnetlib.dhp.actionmanager.promote;
+
+/** Encodes the Actionset promotion strategies */
+public class PromoteAction {
+
+	/** The supported actionset promotion strategies
+	 *
+	 * ENRICH: promotes only records in the actionset matching another record in the
+	 * graph, and enriches them applying the given MergeAndGet strategy
+	 * UPSERT: promotes all the records in an actionset; matching records are updated
+	 * using the given MergeAndGet strategy, the non-matching records are inserted as they are
+	 */
+	public enum Strategy {
+		ENRICH, UPSERT
+	}
+
+	/**
+	 * Returns the string representation of the join type implementing the given PromoteAction.
+	 *
+	 * @param strategy the strategy to be used to promote the Actionset contents
+	 * @return the join type used to implement the promotion strategy
+	 */
+	public static String joinTypeForStrategy(PromoteAction.Strategy strategy) {
+		switch (strategy) {
+			case ENRICH:
+				return "left_outer";
+			case UPSERT:
+				return "full_outer";
+			default:
+				throw new IllegalStateException("unsupported PromoteAction: " + strategy.toString());
+		}
+	}
+}
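The strategy-to-join-type mapping is what makes the promotion behavior concrete in the Spark job below: with ENRICH the graph table drives a left_outer join, so actionset payloads without a matching graph record are dropped; with UPSERT the full_outer join keeps unmatched payloads, which end up inserted as new records. A compact sketch reusing only the API defined above:

// ENRICH -> "left_outer": only graph rows survive; unmatched payloads are discarded.
String enrichJoin = PromoteAction.joinTypeForStrategy(PromoteAction.Strategy.ENRICH);

// UPSERT -> "full_outer": unmatched payloads survive the join and are inserted as-is.
String upsertJoin = PromoteAction.joinTypeForStrategy(PromoteAction.Strategy.UPSERT);

// rowWithIdDS.joinWith(actionPayloadWithIdDS, keyEquality, enrichJoin /* or upsertJoin */)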
@ -67,8 +67,9 @@ public class PromoteActionPayloadForGraphTableJob {
 		String outputGraphTablePath = parser.get("outputGraphTablePath");
 		logger.info("outputGraphTablePath: {}", outputGraphTablePath);

-		MergeAndGet.Strategy strategy = MergeAndGet.Strategy.valueOf(parser.get("mergeAndGetStrategy").toUpperCase());
-		logger.info("strategy: {}", strategy);
+		MergeAndGet.Strategy mergeAndGetStrategy = MergeAndGet.Strategy
+			.valueOf(parser.get("mergeAndGetStrategy").toUpperCase());
+		logger.info("mergeAndGetStrategy: {}", mergeAndGetStrategy);

 		Boolean shouldGroupById = Optional
 			.ofNullable(parser.get("shouldGroupById"))
@ -76,6 +77,12 @@ public class PromoteActionPayloadForGraphTableJob {
 			.orElse(true);
 		logger.info("shouldGroupById: {}", shouldGroupById);

+		PromoteAction.Strategy promoteActionStrategy = Optional
+			.ofNullable(parser.get("promoteActionStrategy"))
+			.map(PromoteAction.Strategy::valueOf)
+			.orElse(PromoteAction.Strategy.UPSERT);
+		logger.info("promoteActionStrategy: {}", promoteActionStrategy);
+
 		@SuppressWarnings("unchecked")
 		Class<? extends Oaf> rowClazz = (Class<? extends Oaf>) Class.forName(graphTableClassName);
 		@SuppressWarnings("unchecked")
@ -97,7 +104,8 @@ public class PromoteActionPayloadForGraphTableJob {
 					inputGraphTablePath,
 					inputActionPayloadPath,
 					outputGraphTablePath,
-					strategy,
+					mergeAndGetStrategy,
+					promoteActionStrategy,
 					rowClazz,
 					actionPayloadClazz,
 					shouldGroupById);
@ -124,14 +132,16 @@ public class PromoteActionPayloadForGraphTableJob {
 		String inputGraphTablePath,
 		String inputActionPayloadPath,
 		String outputGraphTablePath,
-		MergeAndGet.Strategy strategy,
+		MergeAndGet.Strategy mergeAndGetStrategy,
+		PromoteAction.Strategy promoteActionStrategy,
 		Class<G> rowClazz,
 		Class<A> actionPayloadClazz, Boolean shouldGroupById) {
 		Dataset<G> rowDS = readGraphTable(spark, inputGraphTablePath, rowClazz);
 		Dataset<A> actionPayloadDS = readActionPayload(spark, inputActionPayloadPath, actionPayloadClazz);

 		Dataset<G> result = promoteActionPayloadForGraphTable(
-			rowDS, actionPayloadDS, strategy, rowClazz, actionPayloadClazz, shouldGroupById)
+			rowDS, actionPayloadDS, mergeAndGetStrategy, promoteActionStrategy, rowClazz, actionPayloadClazz,
+			shouldGroupById)
 			.map((MapFunction<G, G>) value -> value, Encoders.bean(rowClazz));

 		saveGraphTable(result, outputGraphTablePath);
@ -183,7 +193,8 @@ public class PromoteActionPayloadForGraphTableJob {
 	private static <G extends Oaf, A extends Oaf> Dataset<G> promoteActionPayloadForGraphTable(
 		Dataset<G> rowDS,
 		Dataset<A> actionPayloadDS,
-		MergeAndGet.Strategy strategy,
+		MergeAndGet.Strategy mergeAndGetStrategy,
+		PromoteAction.Strategy promoteActionStrategy,
 		Class<G> rowClazz,
 		Class<A> actionPayloadClazz,
 		Boolean shouldGroupById) {
@ -195,8 +206,9 @@ public class PromoteActionPayloadForGraphTableJob {

 		SerializableSupplier<Function<G, String>> rowIdFn = ModelSupport::idFn;
 		SerializableSupplier<Function<A, String>> actionPayloadIdFn = ModelSupport::idFn;
-		SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn = MergeAndGet.functionFor(strategy);
-		SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn = MergeAndGet.functionFor(strategy);
+		SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn = MergeAndGet
+			.functionFor(mergeAndGetStrategy);
+		SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn = MergeAndGet.functionFor(mergeAndGetStrategy);
 		SerializableSupplier<G> zeroFn = zeroFn(rowClazz);
 		SerializableSupplier<Function<G, Boolean>> isNotZeroFn = PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSourceAndTarget;
@ -207,6 +219,7 @@ public class PromoteActionPayloadForGraphTableJob {
 				rowIdFn,
 				actionPayloadIdFn,
 				mergeRowWithActionPayloadAndGetFn,
+				promoteActionStrategy,
 				rowClazz,
 				actionPayloadClazz);
@ -34,6 +34,7 @@ public class PromoteActionPayloadFunctions {
 	 * @param rowIdFn Function used to get the id of graph table row
 	 * @param actionPayloadIdFn Function used to get id of action payload instance
 	 * @param mergeAndGetFn Function used to merge graph table row and action payload instance
+	 * @param promoteActionStrategy the Actionset promotion strategy
 	 * @param rowClazz Class of graph table
 	 * @param actionPayloadClazz Class of action payload
 	 * @param <G> Type of graph table row
@ -46,6 +47,7 @@ public class PromoteActionPayloadFunctions {
 		SerializableSupplier<Function<G, String>> rowIdFn,
 		SerializableSupplier<Function<A, String>> actionPayloadIdFn,
 		SerializableSupplier<BiFunction<G, A, G>> mergeAndGetFn,
+		PromoteAction.Strategy promoteActionStrategy,
 		Class<G> rowClazz,
 		Class<A> actionPayloadClazz) {
 		if (!isSubClass(rowClazz, actionPayloadClazz)) {
@ -61,7 +63,7 @@ public class PromoteActionPayloadFunctions {
 			.joinWith(
 				actionPayloadWithIdDS,
 				rowWithIdDS.col("_1").equalTo(actionPayloadWithIdDS.col("_1")),
-				"full_outer")
+				PromoteAction.joinTypeForStrategy(promoteActionStrategy))
 			.map(
 				(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>) value -> {
 					Optional<G> rowOpt = Optional.ofNullable(value._1()).map(Tuple2::_2);
@ -41,6 +41,12 @@
     "paramDescription": "strategy for merging graph table objects with action payload instances, MERGE_FROM_AND_GET or SELECT_NEWER_AND_GET",
     "paramRequired": true
   },
+  {
+    "paramName": "pas",
+    "paramLongName": "promoteActionStrategy",
+    "paramDescription": "strategy for promoting the actionset contents into the graph tables, ENRICH or UPSERT (default)",
+    "paramRequired": false
+  },
   {
     "paramName": "sgid",
     "paramLongName": "shouldGroupById",
@ -115,6 +115,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
             <arg>--outputGraphTablePath</arg><arg>${workingDir}/dataset</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="DecisionPromoteResultActionPayloadForDatasetTable"/>
@ -167,6 +168,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Result</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/dataset</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="End"/>
@ -106,6 +106,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/datasource</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
@ -106,6 +106,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/organization</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
@ -114,6 +114,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
             <arg>--outputGraphTablePath</arg><arg>${workingDir}/otherresearchproduct</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="DecisionPromoteResultActionPayloadForOtherResearchProductTable"/>
@ -166,6 +167,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Result</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/otherresearchproduct</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="End"/>
@ -106,6 +106,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/project</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
@ -115,6 +115,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
             <arg>--outputGraphTablePath</arg><arg>${workingDir}/publication</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="DecisionPromoteResultActionPayloadForPublicationTable"/>
@ -167,6 +168,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Result</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/publication</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="End"/>
@ -107,6 +107,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/relation</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
@ -114,6 +114,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
             <arg>--outputGraphTablePath</arg><arg>${workingDir}/software</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="DecisionPromoteResultActionPayloadForSoftwareTable"/>
@ -166,6 +167,7 @@
             <arg>--actionPayloadClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Result</arg>
             <arg>--outputGraphTablePath</arg><arg>${outputGraphRootPath}/software</arg>
             <arg>--mergeAndGetStrategy</arg><arg>${mergeAndGetStrategy}</arg>
+            <arg>--promoteActionStrategy</arg><arg>${promoteActionStrategy}</arg>
             <arg>--shouldGroupById</arg><arg>${shouldGroupById}</arg>
         </spark>
         <ok to="End"/>
@ -54,7 +54,7 @@ public class PromoteActionPayloadFunctionsTest {
 				RuntimeException.class,
 				() -> PromoteActionPayloadFunctions
 					.joinGraphTableWithActionPayloadAndMerge(
-						null, null, null, null, null, OafImplSubSub.class, OafImpl.class));
+						null, null, null, null, null, null, OafImplSubSub.class, OafImpl.class));
 	}

 	@Test
@ -104,6 +104,7 @@ public class PromoteActionPayloadFunctionsTest {
 				rowIdFn,
 				actionPayloadIdFn,
 				mergeAndGetFn,
+				PromoteAction.Strategy.UPSERT,
 				OafImplSubSub.class,
 				OafImplSubSub.class)
 			.collectAsList();
@ -183,6 +184,7 @@ public class PromoteActionPayloadFunctionsTest {
 				rowIdFn,
 				actionPayloadIdFn,
 				mergeAndGetFn,
+				PromoteAction.Strategy.UPSERT,
 				OafImplSubSub.class,
 				OafImplSub.class)
 			.collectAsList();
@ -64,6 +64,9 @@ public class PrepareAffiliationRelations implements Serializable {
 		final String pubmedInputPath = parser.get("pubmedInputPath");
 		log.info("pubmedInputPath: {}", pubmedInputPath);

+		final String openapcInputPath = parser.get("openapcInputPath");
+		log.info("openapcInputPath: {}", openapcInputPath);
+
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);
@ -85,8 +88,14 @@ public class PrepareAffiliationRelations implements Serializable {
 			JavaPairRDD<Text, Text> pubmedRelations = prepareAffiliationRelations(
 				spark, pubmedInputPath, collectedFromPubmed);

+			List<KeyValue> collectedFromOpenAPC = OafMapperUtils
+				.listKeyValues(ModelConstants.OPEN_APC_ID, "OpenAPC");
+			JavaPairRDD<Text, Text> openAPCRelations = prepareAffiliationRelations(
+				spark, openapcInputPath, collectedFromOpenAPC);
+
 			crossrefRelations
 				.union(pubmedRelations)
+				.union(openAPCRelations)
 				.saveAsHadoopFile(
 					outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
@ -95,7 +95,7 @@ public class SparkAtomicActionScoreJob implements Serializable {

 		return projectScores.map((MapFunction<BipProjectModel, Project>) bipProjectScores -> {
 			Project project = new Project();
-			project.setId(bipProjectScores.getProjectId());
+			// project.setId(bipProjectScores.getProjectId());
 			project.setMeasures(bipProjectScores.toMeasures());
 			return project;
 		}, Encoders.bean(Project.class))
@ -34,6 +34,11 @@ public class BipProjectModel {

 	String totalCitationCount;

+	public String getProjectId() {
+		return projectId;
+	}
+
+
 	// each project bip measure has exactly one value, hence one key-value pair
 	private Measure createMeasure(String measureId, String measureValue) {
@ -75,6 +75,7 @@ public class GetFOSSparkJob implements Serializable {
 		fosData.map((MapFunction<Row, FOSDataModel>) r -> {
 			FOSDataModel fosDataModel = new FOSDataModel();
 			fosDataModel.setDoi(r.getString(0).toLowerCase());
+			fosDataModel.setOaid(r.getString(1).toLowerCase());
 			fosDataModel.setLevel1(r.getString(2));
 			fosDataModel.setLevel2(r.getString(3));
 			fosDataModel.setLevel3(r.getString(4));
@ -16,12 +16,14 @@ import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
+import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.FOSDataModel;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
 import eu.dnetlib.dhp.schema.oaf.Subject;
@ -52,28 +54,46 @@ public class PrepareFOSSparkJob implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);

+		final Boolean distributeDOI = Optional
+			.ofNullable(parser.get("distributeDoi"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
 		SparkConf conf = new SparkConf();
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
+				if (distributeDOI)
 					distributeFOSdois(
 						spark,
 						sourcePath,
 						outputPath);
+				else
+					distributeFOSoaid(spark, sourcePath, outputPath);
 			});
 	}

-	private static void distributeFOSdois(SparkSession spark, String sourcePath, String outputPath) {
+	private static void distributeFOSoaid(SparkSession spark, String sourcePath, String outputPath) {
 		Dataset<FOSDataModel> fosDataset = readPath(spark, sourcePath, FOSDataModel.class);

 		fosDataset
-			.groupByKey((MapFunction<FOSDataModel, String>) v -> v.getDoi().toLowerCase(), Encoders.STRING())
+			.groupByKey((MapFunction<FOSDataModel, String>) v -> v.getOaid().toLowerCase(), Encoders.STRING())
 			.mapGroups((MapGroupsFunction<String, FOSDataModel, Result>) (k, it) -> {
+				return getResult(ModelSupport.getIdPrefix(Result.class) + "|" + k, it);
+			}, Encoders.bean(Result.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(outputPath + "/fos");
+	}
+
+	@NotNull
+	private static Result getResult(String k, Iterator<FOSDataModel> it) {
 		Result r = new Result();
 		FOSDataModel first = it.next();
-		r.setId(DHPUtils.generateUnresolvedIdentifier(k, DOI));
+		r.setId(k);

 		HashSet<String> level1 = new HashSet<>();
 		HashSet<String> level2 = new HashSet<>();
@ -107,7 +127,17 @@ public class PrepareFOSSparkJob implements Serializable {
 						ModelConstants.DNET_PROVENANCE_ACTIONS),
 					null));
 		return r;
-			}, Encoders.bean(Result.class))
+	}
+
+	private static void distributeFOSdois(SparkSession spark, String sourcePath, String outputPath) {
+		Dataset<FOSDataModel> fosDataset = readPath(spark, sourcePath, FOSDataModel.class);
+
+		fosDataset
+			.groupByKey((MapFunction<FOSDataModel, String>) v -> v.getDoi().toLowerCase(), Encoders.STRING())
+			.mapGroups(
+				(MapGroupsFunction<String, FOSDataModel, Result>) (k,
+					it) -> getResult(DHPUtils.generateUnresolvedIdentifier(k, DOI), it),
+				Encoders.bean(Result.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
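The split between distributeFOSdois and distributeFOSoaid comes down to how the grouping key becomes a record identifier: DOI-keyed input still produces unresolved identifiers to be matched against the graph in a later step, while OAID-keyed input addresses the OpenAIRE result directly by prepending the result id prefix. A sketch of the two shapes (the literal values below are hypothetical illustrations, not taken from the diff):

// DOI branch: an unresolved identifier, resolved against the graph later, e.g.
//   DHPUtils.generateUnresolvedIdentifier("10.1234/abc", DOI)
//   -> something like "unresolved::10.1234/abc::doi"

// OAID branch: a direct OpenAIRE result id, e.g.
//   ModelSupport.getIdPrefix(Result.class) + "|" + "doi_________::<md5>"
//   -> "50|doi_________::<md5>" (assuming "50" is the Result id prefix)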
@ -0,0 +1,92 @@
+
+package eu.dnetlib.dhp.actionmanager.fosnodoi;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.*;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.utils.*;
+import scala.Tuple2;
+
+public class CreateActionSetSparkJob implements Serializable {
+
+	private static final Logger log = LoggerFactory.getLogger(CreateActionSetSparkJob.class);
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	public static void main(final String[] args) throws IOException, ParseException {
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							CreateActionSetSparkJob.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/actionmanager/fosnodoi/as_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("sourcePath");
+		log.info("inputPath {}", inputPath);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> createActionSet(spark, inputPath, outputPath));
+
+	}
+
+	private static void createActionSet(SparkSession spark, String inputPath, String outputPath) {
+		spark
+			.read()
+			.textFile(inputPath)
+			.map(
+				(MapFunction<String, Result>) value -> OBJECT_MAPPER.readValue(value, Result.class),
+				Encoders.bean(Result.class))
+			.toJavaRDD()
+			.map(p -> new AtomicAction(p.getClass(), p))
+			.mapToPair(
+				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
+					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
+			.saveAsHadoopFile(
+				outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+	}
+
+}
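createActionSet follows the usual actionset layout: every record is wrapped in an AtomicAction and stored in a gzipped SequenceFile whose key is the payload class name and whose value is the serialized action. A sketch of one resulting key/value pair (the JSON is abbreviated and the payload values are hypothetical):

// SequenceFile key   (Text): "eu.dnetlib.dhp.schema.oaf.Result"
// SequenceFile value (Text): {"clazz":"eu.dnetlib.dhp.schema.oaf.Result","payload":{"id":"unresolved::...","subject":[...]}}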
@ -22,12 +22,14 @@ import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.*;
 import eu.dnetlib.dhp.utils.DHPUtils;
@ -37,16 +39,12 @@ public class CreateActionSetSparkJob implements Serializable {
 	public static final String OPENCITATIONS_CLASSID = "sysimport:crosswalk:opencitations";
 	public static final String OPENCITATIONS_CLASSNAME = "Imported from OpenCitations";

-	// DOI-to-DOI citations
-	public static final String COCI = "COCI";
-
-	// PMID-to-PMID citations
-	public static final String POCI = "POCI";
-
 	private static final String DOI_PREFIX = "50|doi_________::";

 	private static final String PMID_PREFIX = "50|pmid________::";
+	private static final String ARXIV_PREFIX = "50|arXiv_______::";

+	private static final String PMCID_PREFIX = "50|pmcid_______::";
 	private static final String TRUST = "0.91";

 	private static final Logger log = LoggerFactory.getLogger(CreateActionSetSparkJob.class);
@ -79,38 +77,30 @@ public class CreateActionSetSparkJob implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath {}", outputPath);

-		final boolean shouldDuplicateRels = Optional
-			.ofNullable(parser.get("shouldDuplicateRels"))
-			.map(Boolean::valueOf)
-			.orElse(Boolean.FALSE);
-
 		SparkConf conf = new SparkConf();
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
-			spark -> extractContent(spark, inputPath, outputPath, shouldDuplicateRels));
+			spark -> extractContent(spark, inputPath, outputPath));

 	}

-	private static void extractContent(SparkSession spark, String inputPath, String outputPath,
-		boolean shouldDuplicateRels) {
-
-		getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, COCI)
-			.union(getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, POCI))
-			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+	private static void extractContent(SparkSession spark, String inputPath, String outputPath) {
+		getTextTextJavaPairRDD(spark, inputPath)
+			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);// , GzipCodec.class);
 	}

-	private static JavaPairRDD<Text, Text> getTextTextJavaPairRDD(SparkSession spark, String inputPath,
-		boolean shouldDuplicateRels, String prefix) {
+	private static JavaPairRDD<Text, Text> getTextTextJavaPairRDD(SparkSession spark, String inputPath) {
 		return spark
 			.read()
-			.textFile(inputPath + "/" + prefix + "/" + prefix + "_JSON/*")
+			.textFile(inputPath)
 			.map(
 				(MapFunction<String, COCI>) value -> OBJECT_MAPPER.readValue(value, COCI.class),
 				Encoders.bean(COCI.class))
 			.flatMap(
 				(FlatMapFunction<COCI, Relation>) value -> createRelation(
-					value, shouldDuplicateRels, prefix)
-						.iterator(),
+					value)
+					.iterator(),
 				Encoders.bean(Relation.class))
 			.filter((FilterFunction<Relation>) Objects::nonNull)
@ -121,34 +111,68 @@ public class CreateActionSetSparkJob implements Serializable {
 					new Text(OBJECT_MAPPER.writeValueAsString(aa))));
 	}

-	private static List<Relation> createRelation(COCI value, boolean duplicate, String p) {
+	private static List<Relation> createRelation(COCI value) throws JsonProcessingException {

 		List<Relation> relationList = new ArrayList<>();
-		String prefix;
 		String citing;
 		String cited;

-		switch (p) {
-			case COCI:
-				prefix = DOI_PREFIX;
-				citing = prefix
+		switch (value.getCiting_pid()) {
+			case "doi":
+				citing = DOI_PREFIX
 					+ IdentifierFactory
 						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getCiting()));
-				cited = prefix
+				break;
+			case "pmid":
+				citing = PMID_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), value.getCiting()));
+				break;
+			case "arxiv":
+				citing = ARXIV_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.arXiv.toString(), value.getCiting()));
+				break;
+			case "pmcid":
+				citing = PMCID_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmc.toString(), value.getCiting()));
+				break;
+			case "isbn":
+			case "issn":
+				return relationList;
+
+			default:
+				throw new IllegalStateException("Invalid prefix: " + new ObjectMapper().writeValueAsString(value));
+		}
+
+		switch (value.getCited_pid()) {
+			case "doi":
+				cited = DOI_PREFIX
 					+ IdentifierFactory
 						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getCited()));
 				break;
-			case POCI:
-				prefix = PMID_PREFIX;
-				citing = prefix
-					+ IdentifierFactory
-						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), value.getCiting()));
-				cited = prefix
+			case "pmid":
+				cited = PMID_PREFIX
 					+ IdentifierFactory
 						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), value.getCited()));
 				break;
+			case "arxiv":
+				cited = ARXIV_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.arXiv.toString(), value.getCited()));
+				break;
+			case "pmcid":
+				cited = PMCID_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmc.toString(), value.getCited()));
+				break;
+			case "isbn":
+			case "issn":
+				return relationList;
 			default:
-				throw new IllegalStateException("Invalid prefix: " + p);
+				throw new IllegalStateException("Invalid prefix: " + new ObjectMapper().writeValueAsString(value));
 		}

 		if (!citing.equals(cited)) {
@ -157,15 +181,6 @@ public class CreateActionSetSparkJob implements Serializable {
 				getRelation(
 					citing,
 					cited, ModelConstants.CITES));
-
-			if (duplicate && value.getCiting().endsWith(".refs")) {
-				citing = prefix + IdentifierFactory
-					.md5(
-						CleaningFunctions
-							.normalizePidValue(
-								"doi", value.getCiting().substring(0, value.getCiting().indexOf(".refs"))));
-				relationList.add(getRelation(citing, cited, ModelConstants.CITES));
-			}
 		}

 		return relationList;
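The rewritten createRelation resolves each endpoint independently from its own pid type, so mixed citations (for instance a DOI citing a PMID) become representable, while isbn/issn endpoints are skipped by returning the empty list. The id construction is identical on both sides; a sketch for a single DOI, using only the calls already present in the hunk above (the raw value is hypothetical):

String rawDoi = "10.1234/ABC"; // hypothetical input pid
String normalized = PidCleaner.normalizePidValue(PidType.doi.toString(), rawDoi);
String openaireId = "50|doi_________::" + IdentifierFactory.md5(normalized);
// the same recipe with PMID_PREFIX / ARXIV_PREFIX / PMCID_PREFIX covers the other pid types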
@ -12,10 +12,7 @@ import java.util.zip.ZipInputStream;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@ -37,17 +34,17 @@ public class GetOpenCitationsRefs implements Serializable {

 		parser.parseArgument(args);

-		final String[] inputFile = parser.get("inputFile").split(";");
-		log.info("inputFile {}", Arrays.asList(inputFile));
+		// final String[] inputFile = parser.get("inputFile").split(";");
+		// log.info("inputFile {}", Arrays.asList(inputFile));

-		final String workingPath = parser.get("workingPath");
-		log.info("workingPath {}", workingPath);
+		final String inputPath = parser.get("inputPath");
+		log.info("inputPath {}", inputPath);

 		final String hdfsNameNode = parser.get("hdfsNameNode");
 		log.info("hdfsNameNode {}", hdfsNameNode);

-		final String prefix = parser.get("prefix");
-		log.info("prefix {}", prefix);
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);

 		Configuration conf = new Configuration();
 		conf.set("fs.defaultFS", hdfsNameNode);
@ -56,20 +53,20 @@ public class GetOpenCitationsRefs implements Serializable {

 		GetOpenCitationsRefs ocr = new GetOpenCitationsRefs();

-		for (String file : inputFile) {
-			ocr.doExtract(workingPath + "/Original/" + file, workingPath, fileSystem, prefix);
-		}
+		ocr.doExtract(inputPath, outputPath, fileSystem);

 	}

-	private void doExtract(String inputFile, String workingPath, FileSystem fileSystem, String prefix)
+	private void doExtract(String inputPath, String outputPath, FileSystem fileSystem)
 		throws IOException {

-		final Path path = new Path(inputFile);
-
-		FSDataInputStream oc_zip = fileSystem.open(path);
-
-		// int count = 1;
+		RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
+			.listFiles(
+				new Path(inputPath), true);
+		while (fileStatusListIterator.hasNext()) {
+			LocatedFileStatus fileStatus = fileStatusListIterator.next();
+			// do stuff with the file like ...
+			FSDataInputStream oc_zip = fileSystem.open(fileStatus.getPath());
 		try (ZipInputStream zis = new ZipInputStream(oc_zip)) {
 			ZipEntry entry = null;
 			while ((entry = zis.getNextEntry()) != null) {
@ -81,7 +78,7 @@ public class GetOpenCitationsRefs implements Serializable {
 					// count++;
 					try (
 						FSDataOutputStream out = fileSystem
-							.create(new Path(workingPath + "/" + prefix + "/" + fileName + ".gz"));
+							.create(new Path(outputPath + "/" + fileName + ".gz"));
 						GZIPOutputStream gzipOs = new GZIPOutputStream(new BufferedOutputStream(out))) {

 						IOUtils.copy(zis, gzipOs);
@ -92,6 +89,7 @@ public class GetOpenCitationsRefs implements Serializable {
 			}

 		}
+		}

 	}
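doExtract now discovers its inputs on its own: fileSystem.listFiles(path, true) walks the input directory recursively, and every zip entry found is rewritten as an individual .gz file under outputPath. A minimal sketch of the traversal idiom, using the same Hadoop FileSystem API as the hunk above (the paths are hypothetical):

RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path("/data/opencitations"), true);
while (files.hasNext()) {
	Path zip = files.next().getPath();
	// open the zip and, for each entry X, write <outputPath>/X.gz as doExtract does above
}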
@@ -0,0 +1,171 @@
+
+package eu.dnetlib.dhp.actionmanager.opencitations;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.ForeachFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import scala.Tuple2;
+
+/**
+ * @author miriam.baglioni
+ * @Date 29/02/24
+ */
+public class MapOCIdsInPids implements Serializable {
+
+	private static final Logger log = LoggerFactory.getLogger(MapOCIdsInPids.class);
+	private static final String DELIMITER = ",";
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	public static void main(final String[] args) throws IOException, ParseException {
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							MapOCIdsInPids.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/actionmanager/opencitations/remap_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("inputPath");
+		log.info("inputPath {}", inputPath);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		final String nameNode = parser.get("nameNode");
+		log.info("nameNode {}", nameNode);
+
+		unzipCorrespondenceFile(inputPath, nameNode);
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> mapIdentifiers(spark, inputPath, outputPath));
+
+	}
+
+	private static void unzipCorrespondenceFile(String inputPath, String hdfsNameNode) throws IOException {
+		Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+
+		final Path path = new Path(inputPath + "/correspondence/omid.zip");
+		FileSystem fileSystem = FileSystem.get(conf);
+
+		FSDataInputStream project_zip = fileSystem.open(path);
+
+		try (ZipInputStream zis = new ZipInputStream(project_zip)) {
+			ZipEntry entry = null;
+			while ((entry = zis.getNextEntry()) != null) {
+
+				if (!entry.isDirectory()) {
+					String fileName = entry.getName();
+					byte buffer[] = new byte[1024];
+					int count;
+
+					try (
+						FSDataOutputStream out = fileSystem
+							.create(new Path(inputPath + "/correspondence/omid.csv"))) {
+
+						while ((count = zis.read(buffer, 0, buffer.length)) != -1)
+							out.write(buffer, 0, count);
+
+					}
+
+				}
+
+			}
+
+		}
+
+	}
+
+	private static void mapIdentifiers(SparkSession spark, String inputPath, String outputPath) {
+		Dataset<COCI> coci = spark
+			.read()
+			.textFile(inputPath + "/JSON")
+			.map(
+				(MapFunction<String, COCI>) value -> OBJECT_MAPPER.readValue(value, COCI.class),
+				Encoders.bean(COCI.class));
+
+		Dataset<Tuple2<String, String>> correspondenceData = spark
+			.read()
+			.format("csv")
+			.option("sep", DELIMITER)
+			.option("inferSchema", "true")
+			.option("header", "true")
+			.option("quotes", "\"")
+			.load(inputPath + "/correspondence/omid.csv")
+			.repartition(5000)
+			.flatMap((FlatMapFunction<Row, Tuple2<String, String>>) r -> {
+				String ocIdentifier = r.getAs("omid");
+				String[] correspondentIdentifiers = ((String) r.getAs("id")).split(" ");
+				return Arrays
+					.stream(correspondentIdentifiers)
+					.map(ci -> new Tuple2<String, String>(ocIdentifier, ci))
+					.collect(Collectors.toList())
+					.iterator();
+			}, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+
+		Dataset<COCI> mappedCitingDataset = coci
+			.joinWith(correspondenceData, coci.col("citing").equalTo(correspondenceData.col("_1")))
+			.map((MapFunction<Tuple2<COCI, Tuple2<String, String>>, COCI>) t2 -> {
+				String correspondent = t2._2()._2();
+				t2._1().setCiting_pid(correspondent.substring(0, correspondent.indexOf(":")));
+				t2._1().setCiting(correspondent.substring(correspondent.indexOf(":") + 1));
+				return t2._1();
+			}, Encoders.bean(COCI.class));
+
+		mappedCitingDataset
+			.joinWith(correspondenceData, mappedCitingDataset.col("cited").equalTo(correspondenceData.col("_1")))
+			.map((MapFunction<Tuple2<COCI, Tuple2<String, String>>, COCI>) t2 -> {
+				String correspondent = t2._2()._2();
+				t2._1().setCited_pid(correspondent.substring(0, correspondent.indexOf(":")));
+				t2._1().setCited(correspondent.substring(correspondent.indexOf(":") + 1));
+				return t2._1();
+			}, Encoders.bean(COCI.class))
+			.write()
+			.mode(SaveMode.Append)
+			.option("compression", "gzip")
+			.json(outputPath);
+
+	}
+
+}
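The remapping job above joins each citation against the OMID correspondence file and splits the correspondent value on the first colon into a pid type and a pid value. A sketch of that split in isolation; the sample value is illustrative, assumed from the split logic above:

// e.g. correspondent = "doi:10.1000/xyz" (value shape assumed from the indexOf(":") logic above)
String correspondent = "doi:10.1000/xyz";
int sep = correspondent.indexOf(":");
String pidType = correspondent.substring(0, sep);   // "doi"         -> setCiting_pid / setCited_pid
String pidValue = correspondent.substring(sep + 1); // "10.1000/xyz" -> setCiting / setCited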
@@ -12,11 +12,9 @@ import java.util.Optional;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.*;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.*;
 import org.slf4j.Logger;
@@ -42,19 +40,21 @@ public class ReadCOCI implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);
 
-		final String[] inputFile = parser.get("inputFile").split(";");
-		log.info("inputFile {}", Arrays.asList(inputFile));
+		final String hdfsNameNode = parser.get("hdfsNameNode");
+		log.info("hdfsNameNode {}", hdfsNameNode);
 
 		Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
-		final String workingPath = parser.get("workingPath");
+		final String workingPath = parser.get("inputPath");
 		log.info("workingPath {}", workingPath);
 
-		final String format = parser.get("format");
-		log.info("format {}", format);
-
 		SparkConf sconf = new SparkConf();
 
+		Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+
+		FileSystem fileSystem = FileSystem.get(conf);
 		final String delimiter = Optional
 			.ofNullable(parser.get("delimiter"))
 			.orElse(DEFAULT_DELIMITER);
@@ -66,20 +66,21 @@ public class ReadCOCI implements Serializable {
 				doRead(
 					spark,
 					workingPath,
-					inputFile,
+					fileSystem,
 					outputPath,
-					delimiter,
-					format);
+					delimiter);
 			});
 	}
 
-	private static void doRead(SparkSession spark, String workingPath, String[] inputFiles,
+	private static void doRead(SparkSession spark, String workingPath, FileSystem fileSystem,
 		String outputPath,
-		String delimiter, String format) {
-
-		for (String inputFile : inputFiles) {
-			String pString = workingPath + "/" + inputFile + ".gz";
+		String delimiter) throws IOException {
+		RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
+			.listFiles(
+				new Path(workingPath), true);
+		while (fileStatusListIterator.hasNext()) {
+			LocatedFileStatus fileStatus = fileStatusListIterator.next();
+			log.info("extracting file {}", fileStatus.getPath().toString());
 			Dataset<Row> cociData = spark
 				.read()
 				.format("csv")
@@ -87,26 +88,26 @@ public class ReadCOCI implements Serializable {
 				.option("inferSchema", "true")
 				.option("header", "true")
 				.option("quotes", "\"")
-				.load(pString)
+				.load(fileStatus.getPath().toString())
 				.repartition(100);
 
 			cociData.map((MapFunction<Row, COCI>) row -> {
 
 				COCI coci = new COCI();
-				if (format.equals("COCI")) {
 				coci.setCiting(row.getString(1));
 				coci.setCited(row.getString(2));
-				} else {
-					coci.setCiting(String.valueOf(row.getInt(1)));
-					coci.setCited(String.valueOf(row.getInt(2)));
-				}
 				coci.setOci(row.getString(0));
 
 				return coci;
 			}, Encoders.bean(COCI.class))
+				.filter((FilterFunction<COCI>) c -> c != null)
 				.write()
-				.mode(SaveMode.Overwrite)
+				.mode(SaveMode.Append)
 				.option("compression", "gzip")
-				.json(outputPath + inputFile);
+				.json(outputPath);
+			fileSystem.rename(fileStatus.getPath(), new Path("/tmp/miriam/OC/DONE"));
 		}
 
 	}
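Once a CSV file has been appended to the output, doRead renames it out of the input folder, so a restarted run only picks up files that were never processed. The same move-to-done checkpoint pattern in isolation; class and paths are hypothetical:

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class DoneMarker {
	// Move a processed file under a "done" folder so restarted runs skip it.
	static void markDone(FileSystem fs, Path processed, Path doneDir) throws IOException {
		if (!fs.exists(doneDir)) {
			fs.mkdirs(doneDir);
		}
		fs.rename(processed, new Path(doneDir, processed.getName()));
	}
}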
@@ -9,8 +9,10 @@ public class COCI implements Serializable {
 	private String oci;
 
 	private String citing;
+	private String citing_pid;
 
 	private String cited;
+	private String cited_pid;
 
 	public String getOci() {
 		return oci;
@@ -25,6 +27,8 @@ public class COCI implements Serializable {
 	}
 
 	public void setCiting(String citing) {
+		if (citing != null && citing.startsWith("omid:"))
+			citing = citing.substring(5);
 		this.citing = citing;
 	}
 
@@ -33,7 +37,24 @@ public class COCI implements Serializable {
 	}
 
 	public void setCited(String cited) {
+		if (cited != null && cited.startsWith("omid:"))
+			cited = cited.substring(5);
 		this.cited = cited;
 	}
 
+	public String getCiting_pid() {
+		return citing_pid;
+	}
+
+	public void setCiting_pid(String citing_pid) {
+		this.citing_pid = citing_pid;
+	}
+
+	public String getCited_pid() {
+		return cited_pid;
+	}
+
+	public void setCited_pid(String cited_pid) {
+		this.cited_pid = cited_pid;
+	}
 }
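The new setters normalize on write: values arriving with the omid: prefix and bare OMIDs end up identical, which keeps the join keys of the remapping job consistent. Expected behaviour, with an illustrative value:

COCI a = new COCI();
a.setCiting("omid:br/061201599020"); // prefix stripped by the setter; value is illustrative
COCI b = new COCI();
b.setCiting("br/061201599020");
// a.getCiting().equals(b.getCiting()) -> true, so both spellings join the same way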
@@ -1,12 +1,20 @@
 
 package eu.dnetlib.dhp.actionmanager.project;
 
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-
-import java.util.Arrays;
-import java.util.Objects;
-import java.util.Optional;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProgramme;
+import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
+import eu.dnetlib.dhp.actionmanager.project.utils.model.JsonTopic;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.H2020Classification;
+import eu.dnetlib.dhp.schema.oaf.H2020Programme;
+import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.oaf.Project;
+import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
+import eu.dnetlib.dhp.utils.DHPUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -18,24 +26,14 @@ import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProgramme;
-import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
-import eu.dnetlib.dhp.actionmanager.project.utils.model.EXCELTopic;
-import eu.dnetlib.dhp.actionmanager.project.utils.model.JsonTopic;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.action.AtomicAction;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.H2020Classification;
-import eu.dnetlib.dhp.schema.oaf.H2020Programme;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
-import eu.dnetlib.dhp.schema.oaf.Project;
-import eu.dnetlib.dhp.utils.DHPUtils;
 import scala.Tuple2;
 
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.Optional;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
 /**
  * Class that makes the ActionSet. To prepare the AS two joins are needed
  *
@@ -160,9 +158,11 @@ public class SparkAtomicActionJob {
 				(MapFunction<Project, String>) OafEntity::getId,
 				Encoders.STRING())
 			.mapGroups((MapGroupsFunction<String, Project, Project>) (s, it) -> {
-				Project first = it.next();
-				it.forEachRemaining(first::mergeFrom);
-				return first;
+				Project merge = it.next();
+				while (it.hasNext()) {
+					merge = MergeUtils.mergeProject(merge, it.next());
+				}
+				return merge;
 			}, Encoders.bean(Project.class))
 			.toJavaRDD()
 			.map(p -> new AtomicAction(Project.class, p))
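The merge above now folds the group's iterator through MergeUtils.mergeProject instead of mutating the first element with mergeFrom. The general shape of that fold, as a sketch with a generic merge operator standing in for mergeProject:

import java.util.Iterator;
import java.util.function.BinaryOperator;

class FoldMerge {
	// Fold an iterator of grouped records into one, left to right.
	static <T> T reduce(Iterator<T> it, BinaryOperator<T> merge) {
		T acc = it.next(); // groups produced by groupByKey are never empty
		while (it.hasNext()) {
			acc = merge.apply(acc, it.next());
		}
		return acc;
	}
}

Used as reduce(it, MergeUtils::mergeProject), this is equivalent to the while loop in the hunk above.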
@@ -0,0 +1,195 @@
+
+package eu.dnetlib.dhp.actionmanager.transformativeagreement;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.*;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.transformativeagreement.model.TransformativeAgreementModel;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.Country;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.utils.*;
+import scala.Tuple2;
+
+public class CreateActionSetSparkJob implements Serializable {
+
+	private static final Logger log = LoggerFactory.getLogger(CreateActionSetSparkJob.class);
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	private static final String IREL_PROJECT = "40|100018998___::1e5e62235d094afd01cd56e65112fc63";
+	private static final String TRANSFORMATIVE_AGREEMENT = "openapc::transformativeagreement";
+
+	public static void main(final String[] args) throws IOException, ParseException {
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							CreateActionSetSparkJob.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/actionmanager/transformativeagreement/as_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("inputPath");
+		log.info("inputPath {}", inputPath);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> createActionSet(spark, inputPath, outputPath));
+
+	}
+
+	private static void createActionSet(SparkSession spark, String inputPath, String outputPath) {
+		JavaRDD<AtomicAction> relations = spark
+			.read()
+			.textFile(inputPath)
+			.map(
+				(MapFunction<String, TransformativeAgreementModel>) value -> OBJECT_MAPPER
+					.readValue(value, TransformativeAgreementModel.class),
+				Encoders.bean(TransformativeAgreementModel.class))
+			.flatMap(
+				(FlatMapFunction<TransformativeAgreementModel, Relation>) value -> createRelation(
+					value)
+						.iterator(),
+				Encoders.bean(Relation.class))
+			.filter((FilterFunction<Relation>) Objects::nonNull)
+			.toJavaRDD()
+			.map(p -> new AtomicAction(p.getClass(), p));
+		// TODO relations on stand-by until we know whether to create them; if we do, just make a union before saving the sequence file
+		spark
+			.read()
+			.textFile(inputPath)
+			.map(
+				(MapFunction<String, TransformativeAgreementModel>) value -> OBJECT_MAPPER
+					.readValue(value, TransformativeAgreementModel.class),
+				Encoders.bean(TransformativeAgreementModel.class))
+			.map(
+				(MapFunction<TransformativeAgreementModel, Result>) value -> createResult(
+					value),
+				Encoders.bean(Result.class))
+			.filter((FilterFunction<Result>) r -> r != null)
+			.toJavaRDD()
+			.map(p -> new AtomicAction(p.getClass(), p))
+			.mapToPair(
+				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
+					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
+			.saveAsHadoopFile(
+				outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+
+	}
+
+	private static Result createResult(TransformativeAgreementModel value) {
+		Result r = new Result();
+		r
+			.setId(
+				"50|doi_________::"
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getDoi())));
+		r.setTransformativeAgreement(value.getAgreement());
+		Country country = new Country();
+		country.setClassid(value.getCountry());
+		country.setClassname(value.getCountry());
+		country
+			.setDataInfo(
+				OafMapperUtils
+					.dataInfo(
+						false, ModelConstants.SYSIMPORT_ACTIONSET, false, false,
+						OafMapperUtils
+							.qualifier(
+								"openapc::transformativeagreement",
+								"Harvested from Transformative Agreement file from OpenAPC",
+								ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+						"0.9"));
+		country.setSchemeid(ModelConstants.DNET_COUNTRY_TYPE);
+		country.setSchemename(ModelConstants.DNET_COUNTRY_TYPE);
+		r.setCountry(Arrays.asList(country));
+		return r;
+	}
+
+	private static List<Relation> createRelation(TransformativeAgreementModel value) {
+
+		List<Relation> relationList = new ArrayList<>();
+
+		if (value.getAgreement().startsWith("IReL")) {
+			String paper;
+
+			paper = "50|doi_________::"
+				+ IdentifierFactory
+					.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getDoi()));
+
+			relationList
+				.add(
+					getRelation(
+						paper,
+						IREL_PROJECT, ModelConstants.IS_PRODUCED_BY));
+
+			relationList.add(getRelation(IREL_PROJECT, paper, ModelConstants.PRODUCES));
+		}
+		return relationList;
+	}
+
+	public static Relation getRelation(
+		String source,
+		String target,
+		String relClass) {
+
+		return OafMapperUtils
+			.getRelation(
+				source,
+				target,
+				ModelConstants.RESULT_PROJECT,
+				ModelConstants.OUTCOME,
+				relClass,
+				Arrays
+					.asList(
+						OafMapperUtils.keyValue(ModelConstants.OPEN_APC_ID, ModelConstants.OPEN_APC_NAME)),
+				OafMapperUtils
+					.dataInfo(
+						false, null, false, false,
+						OafMapperUtils
+							.qualifier(
+								TRANSFORMATIVE_AGREEMENT, "Transformative Agreement",
+								ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+						"0.9"),
+				null);
+	}
+
+}
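Result identifiers above are built as the 50|doi_________:: prefix plus an MD5 of the normalized DOI. A self-contained sketch of that id scheme; the lower-casing here is a simplified stand-in for what PidCleaner.normalizePidValue actually does:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class OpenaireIdSketch {
	static String resultId(String doi) throws NoSuchAlgorithmException {
		String normalized = doi.trim().toLowerCase(); // simplified stand-in for the real pid normalization
		MessageDigest md = MessageDigest.getInstance("MD5");
		byte[] digest = md.digest(normalized.getBytes(StandardCharsets.UTF_8));
		StringBuilder hex = new StringBuilder();
		for (byte b : digest) {
			hex.append(String.format("%02x", b));
		}
		return "50|doi_________::" + hex;
	}
}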
@@ -0,0 +1,51 @@
+
+package eu.dnetlib.dhp.actionmanager.transformativeagreement.model;
+
+import java.io.Serializable;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+/**
+ * @author miriam.baglioni
+ * @Date 18/12/23
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+
+public class TransformativeAgreementModel implements Serializable {
+	private String institution;
+	private String doi;
+	private String agreement;
+	private String country;
+
+	public String getCountry() {
+		return country;
+	}
+
+	public void setCountry(String country) {
+		this.country = country;
+	}
+
+	public String getInstitution() {
+		return institution;
+	}
+
+	public void setInstitution(String institution) {
+		this.institution = institution;
+	}
+
+	public String getDoi() {
+		return doi;
+	}
+
+	public void setDoi(String doi) {
+		this.doi = doi;
+	}
+
+	public String getAgreement() {
+		return agreement;
+	}
+
+	public void setAgreement(String agreement) {
+		this.agreement = agreement;
+	}
+}
@@ -19,6 +19,7 @@ import org.slf4j.LoggerFactory;
 import eu.dnetlib.dhp.aggregation.common.ReporterCallback;
 import eu.dnetlib.dhp.aggregation.common.ReportingJob;
 import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
+import eu.dnetlib.dhp.collection.plugin.base.BaseCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.file.FileCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.file.FileGZipCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.mongodb.MDStoreCollectorPlugin;
@@ -120,6 +121,8 @@ public class CollectorWorker extends ReportingJob {
 				return new FileCollectorPlugin(fileSystem);
 			case fileGzip:
 				return new FileGZipCollectorPlugin(fileSystem);
+			case baseDump:
+				return new BaseCollectorPlugin(this.fileSystem);
 			case other:
 				final CollectorPlugin.NAME.OTHER_NAME plugin = Optional
 					.ofNullable(api.getParams().get("other_plugin_type"))
@@ -0,0 +1,244 @@
+
+package eu.dnetlib.dhp.collection.orcid;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.BlockingQueue;
+
+import javax.swing.*;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.http.HttpHeaders;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.common.collection.HttpClientParams;
+
+public class ORCIDWorker extends Thread {
+
+	final static Logger log = LoggerFactory.getLogger(ORCIDWorker.class);
+
+	public static String JOB_COMPLETE = "JOB_COMPLETE";
+
+	private static final String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)";
+
+	private final BlockingQueue<String> queue;
+
+	private boolean hasComplete = false;
+
+	private final SequenceFile.Writer employments;
+
+	private final SequenceFile.Writer summary;
+	private final SequenceFile.Writer works;
+
+	private final String token;
+
+	private final String id;
+
+	public static ORCIDWorkerBuilder builder() {
+		return new ORCIDWorkerBuilder();
+	}
+
+	public ORCIDWorker(String id, BlockingQueue<String> myqueue, SequenceFile.Writer employments,
+		SequenceFile.Writer summary, SequenceFile.Writer works, String token) {
+		this.id = id;
+		this.queue = myqueue;
+		this.employments = employments;
+		this.summary = summary;
+		this.works = works;
+		this.token = token;
+	}
+
+	public static String retrieveURL(final String id, final String apiUrl, String token) {
+		try {
+			final HttpURLConnection urlConn = getHttpURLConnection(apiUrl, token);
+			if (urlConn.getResponseCode() > 199 && urlConn.getResponseCode() < 300) {
+				InputStream input = urlConn.getInputStream();
+				return IOUtils.toString(input);
+			} else {
+				log
+					.error(
+						"Thread {} UNABLE TO DOWNLOAD FROM THIS URL {} , status code {}", id, apiUrl,
+						urlConn.getResponseCode());
+			}
+		} catch (Exception e) {
+			log.error("Thread {} Error on retrieving URL {} {}", id, apiUrl, e);
+		}
+		return null;
+	}
+
+	@NotNull
+	private static HttpURLConnection getHttpURLConnection(String apiUrl, String token) throws IOException {
+		final HttpURLConnection urlConn = (HttpURLConnection) new URL(apiUrl).openConnection();
+		final HttpClientParams clientParams = new HttpClientParams();
+		urlConn.setInstanceFollowRedirects(false);
+		urlConn.setReadTimeout(clientParams.getReadTimeOut() * 1000);
+		urlConn.setConnectTimeout(clientParams.getConnectTimeOut() * 1000);
+		urlConn.addRequestProperty(HttpHeaders.USER_AGENT, userAgent);
+		urlConn.addRequestProperty(HttpHeaders.AUTHORIZATION, String.format("Bearer %s", token));
+		return urlConn;
+	}
+
+	private static String generateSummaryURL(final String orcidId) {
+		return "https://api.orcid.org/v3.0/" + orcidId + "/record";
+	}
+
+	private static String generateWorksURL(final String orcidId) {
+		return "https://api.orcid.org/v3.0/" + orcidId + "/works";
+	}
+
+	private static String generateEmploymentsURL(final String orcidId) {
+		return "https://api.orcid.org/v3.0/" + orcidId + "/employments";
+	}
+
+	private static void writeResultToSequenceFile(String id, String url, String token, String orcidId,
+		SequenceFile.Writer file) throws IOException {
+		final String response = retrieveURL(id, url, token);
+		if (response != null) {
+			if (orcidId == null) {
+				log.error("Thread {} {} {}", id, orcidId, response);
+				throw new RuntimeException("null items ");
+			}
+
+			if (file == null) {
+				log.error("Thread {} file is null for {} URL:{}", id, url, orcidId);
+			} else {
+				file.append(new Text(orcidId), new Text(response));
+				file.hflush();
+			}
+
+		} else
+			log.error("Thread {} response is null for {} URL:{}", id, url, orcidId);
+
+	}
+
+	@Override
+	public void run() {
+		final Text key = new Text();
+		final Text value = new Text();
+		long start;
+		long total_time;
+		String orcidId = "";
+		int requests = 0;
+		if (summary == null || employments == null || works == null)
+			throw new RuntimeException("Null files");
+
+		while (!hasComplete) {
+			try {
+
+				orcidId = queue.take();
+
+				if (orcidId.equalsIgnoreCase(JOB_COMPLETE)) {
+					hasComplete = true;
+				} else {
+					start = System.currentTimeMillis();
+					writeResultToSequenceFile(id, generateSummaryURL(orcidId), token, orcidId, summary);
+					total_time = System.currentTimeMillis() - start;
+					requests++;
+					if (total_time < 1000) {
+						// I know making a thread sleep is bad, but we need to stay below 24 requests per second,
+						// hence the time between two HTTP requests in a thread must be at least 1 second
+						Thread.sleep(1000L - total_time);
+					}
+					start = System.currentTimeMillis();
+					writeResultToSequenceFile(id, generateWorksURL(orcidId), token, orcidId, works);
+					total_time = System.currentTimeMillis() - start;
+					requests++;
+					if (total_time < 1000) {
+						// I know making a thread sleep is bad, but we need to stay below 24 requests per second,
+						// hence the time between two HTTP requests in a thread must be at least 1 second
+						Thread.sleep(1000L - total_time);
+					}
+					start = System.currentTimeMillis();
+					writeResultToSequenceFile(id, generateEmploymentsURL(orcidId), token, orcidId, employments);
+					total_time = System.currentTimeMillis() - start;
+					requests++;
+					if (total_time < 1000) {
+						// I know making a thread sleep is bad, but we need to stay below 24 requests per second,
+						// hence the time between two HTTP requests in a thread must be at least 1 second
+						Thread.sleep(1000L - total_time);
+					}
+					if (requests % 30 == 0) {
+						log.info("Thread {} Downloaded {}", id, requests);
+					}
+				}
+
+			} catch (Throwable e) {
+
+				log.error("Thread {} Unable to save ORCID: {} item error", id, orcidId, e);
+
+			}
+
+		}
+		try {
+			works.close();
+			summary.close();
+			employments.close();
+		} catch (Throwable e) {
+			throw new RuntimeException(e);
+		}
+
+		log.info("Thread {} COMPLETE ", id);
+		log.info("Thread {} Downloaded {}", id, requests);
+
+	}
+
+	public static class ORCIDWorkerBuilder {
+
+		private String id;
+		private SequenceFile.Writer employments;
+		private SequenceFile.Writer summary;
+		private SequenceFile.Writer works;
+		private BlockingQueue<String> queue;
+
+		private String token;
+
+		public ORCIDWorkerBuilder withId(final String id) {
+			this.id = id;
+			return this;
+		}
+
+		public ORCIDWorkerBuilder withEmployments(final SequenceFile.Writer sequenceFile) {
+			this.employments = sequenceFile;
+			return this;
+		}
+
+		public ORCIDWorkerBuilder withSummary(final SequenceFile.Writer sequenceFile) {
+			this.summary = sequenceFile;
+			return this;
+		}
+
+		public ORCIDWorkerBuilder withWorks(final SequenceFile.Writer sequenceFile) {
+			this.works = sequenceFile;
+			return this;
+		}
+
+		public ORCIDWorkerBuilder withAccessToken(final String accessToken) {
+			this.token = accessToken;
+			return this;
+		}
+
+		public ORCIDWorkerBuilder withBlockingQueue(final BlockingQueue<String> queue) {
+			this.queue = queue;
+			return this;
+		}
+
+		public ORCIDWorker build() {
+			if (this.summary == null || this.works == null || this.employments == null || StringUtils.isEmpty(token)
+				|| queue == null)
+				throw new RuntimeException("Unable to build missing required params");
+			return new ORCIDWorker(id, queue, employments, summary, works, token);
+		}
+
+	}
+
+}
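Each worker pads every request out to a full second, so one thread issues at most one call per second and the 22 workers together stay under the 24-requests-per-second budget mentioned in the comments. The throttle, extracted as a sketch:

class PerSecondThrottle {
	// Run one HTTP call, then pad the elapsed time up to a full second.
	static void throttled(Runnable request) throws InterruptedException {
		long start = System.currentTimeMillis();
		request.run();
		long elapsed = System.currentTimeMillis() - start;
		if (elapsed < 1000) {
			Thread.sleep(1000L - elapsed);
		}
	}
}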
@@ -0,0 +1,171 @@
+
+package eu.dnetlib.dhp.collection.orcid;
+
+import static eu.dnetlib.dhp.utils.DHPUtils.getHadoopConfiguration;
+
+import java.io.*;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.collection.HttpClientParams;
+
+public class OrcidGetUpdatesFile {
+
+	private static Logger log = LoggerFactory.getLogger(OrcidGetUpdatesFile.class);
+
+	public static void main(String[] args) throws Exception {
+
+		ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							OrcidGetUpdatesFile.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/collection/orcid/download_orcid_update_parameter.json")))
+
+		);
+		parser.parseArgument(args);
+
+		final String namenode = parser.get("namenode");
+		log.info("got variable namenode: {}", namenode);
+
+		final String master = parser.get("master");
+		log.info("got variable master: {}", master);
+
+		final String targetPath = parser.get("targetPath");
+		log.info("got variable targetPath: {}", targetPath);
+
+		final String apiURL = parser.get("apiURL");
+		log.info("got variable apiURL: {}", apiURL);
+
+		final String accessToken = parser.get("accessToken");
+		log.info("got variable accessToken: {}", accessToken);
+
+		final String graphPath = parser.get("graphPath");
+		log.info("got variable graphPath: {}", graphPath);
+
+		final SparkSession spark = SparkSession
+			.builder()
+			.appName(OrcidGetUpdatesFile.class.getName())
+			.master(master)
+			.getOrCreate();
+
+		final String latestDate = spark
+			.read()
+			.load(graphPath + "/Authors")
+			.selectExpr("max(lastModifiedDate)")
+			.first()
+			.getString(0);
+
+		log.info("latest date is {}", latestDate);
+
+		final FileSystem fileSystem = FileSystem.get(getHadoopConfiguration(namenode));
+
+		new OrcidGetUpdatesFile().readTar(fileSystem, accessToken, apiURL, targetPath, latestDate);
+
+	}
+
+	private SequenceFile.Writer createFile(Path aPath, FileSystem fileSystem) throws IOException {
+		return SequenceFile
+			.createWriter(
+				fileSystem.getConf(),
+				SequenceFile.Writer.file(aPath),
+				SequenceFile.Writer.keyClass(Text.class),
+				SequenceFile.Writer.valueClass(Text.class));
+	}
+
+	private ORCIDWorker createWorker(final String id, final String targetPath, final BlockingQueue<String> queue,
+		final String accessToken, FileSystem fileSystem) throws Exception {
+		return ORCIDWorker
+			.builder()
+			.withId(id)
+			.withEmployments(createFile(new Path(String.format("%s/employments_%s", targetPath, id)), fileSystem))
+			.withSummary(createFile(new Path(String.format("%s/summary_%s", targetPath, id)), fileSystem))
+			.withWorks(createFile(new Path(String.format("%s/works_%s", targetPath, id)), fileSystem))
+			.withAccessToken(accessToken)
+			.withBlockingQueue(queue)
+			.build();
+	}
+
+	public void readTar(FileSystem fileSystem, final String accessToken, final String apiURL, final String targetPath,
+		final String startDate) throws Exception {
+		final HttpURLConnection urlConn = (HttpURLConnection) new URL(apiURL).openConnection();
+		final HttpClientParams clientParams = new HttpClientParams();
+		urlConn.setInstanceFollowRedirects(false);
+		urlConn.setReadTimeout(clientParams.getReadTimeOut() * 1000);
+		urlConn.setConnectTimeout(clientParams.getConnectTimeOut() * 1000);
+		if (urlConn.getResponseCode() > 199 && urlConn.getResponseCode() < 300) {
+			InputStream input = urlConn.getInputStream();
+
+			Path hdfsWritePath = new Path("/tmp/orcid_updates.tar.gz");
+			final FSDataOutputStream fsDataOutputStream = fileSystem.create(hdfsWritePath, true);
+			IOUtils.copy(input, fsDataOutputStream);
+			fsDataOutputStream.flush();
+			fsDataOutputStream.close();
+			FSDataInputStream updateFile = fileSystem.open(hdfsWritePath);
+			TarArchiveInputStream tais = new TarArchiveInputStream(new GzipCompressorInputStream(
+				new BufferedInputStream(
+					updateFile.getWrappedStream())));
+			TarArchiveEntry entry;
+
+			BlockingQueue<String> queue = new ArrayBlockingQueue<String>(3000);
+			final List<ORCIDWorker> workers = new ArrayList<>();
+			for (int i = 0; i < 22; i++) {
+				workers.add(createWorker("" + i, targetPath, queue, accessToken, fileSystem));
+			}
+			workers.forEach(Thread::start);
+
+			while ((entry = tais.getNextTarEntry()) != null) {
+
+				if (entry.isFile()) {
+
+					BufferedReader br = new BufferedReader(new InputStreamReader(tais));
+					System.out.println(br.readLine());
+					br
+						.lines()
+						.map(l -> l.split(","))
+						.filter(s -> StringUtils.compare(s[3].substring(0, 10), startDate) > 0)
+						.map(s -> s[0])
+						.forEach(s -> {
+							try {
+								queue.put(s);
+							} catch (InterruptedException e) {
+								throw new RuntimeException(e);
+							}
+						});
+
+				}
+			}
+
+			for (int i = 0; i < 22; i++) {
+				queue.put(ORCIDWorker.JOB_COMPLETE);
+			}
+			for (ORCIDWorker worker : workers) {
+				worker.join();
+			}
+		}
+
+	}
+}
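readTar streams the downloaded archive without unpacking it: gunzip, walk the tar entries, and push to the queue only the ORCID iDs whose fourth CSV column (the last-modified date) is newer than the graph's high-water mark. A reduced local sketch of that streaming filter; the column layout is assumed from the code above:

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;

class LambdaFileFilter {
	static void emitUpdatedOrcids(String tarGzPath, String startDate) throws IOException {
		try (TarArchiveInputStream tais = new TarArchiveInputStream(
			new GzipCompressorInputStream(new FileInputStream(tarGzPath)))) {
			TarArchiveEntry entry;
			while ((entry = tais.getNextTarEntry()) != null) {
				if (!entry.isFile())
					continue;
				// reading from tais stops at the current entry's boundary
				BufferedReader br = new BufferedReader(new InputStreamReader(tais));
				br.readLine(); // assumed CSV header line, skipped as in the code above
				br
					.lines()
					.map(l -> l.split(","))
					.filter(s -> s[3].substring(0, 10).compareTo(startDate) > 0)
					.map(s -> s[0])
					.forEach(System.out::println);
			}
		}
	}
}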
@ -1,11 +1,15 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.collection.orcid;
|
package eu.dnetlib.dhp.collection.orcid;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.*;
|
||||||
import java.util.Collections;
|
import java.util.stream.Collectors;
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.dom4j.Document;
|
||||||
|
import org.dom4j.DocumentFactory;
|
||||||
|
import org.dom4j.DocumentHelper;
|
||||||
|
import org.dom4j.Node;
|
||||||
|
import org.jetbrains.annotations.NotNull;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
@ -40,8 +44,8 @@ public class OrcidParser {
|
||||||
private static final String NS_ERROR = "error";
|
private static final String NS_ERROR = "error";
|
||||||
private static final String NS_HISTORY = "history";
|
private static final String NS_HISTORY = "history";
|
||||||
private static final String NS_HISTORY_URL = "http://www.orcid.org/ns/history";
|
private static final String NS_HISTORY_URL = "http://www.orcid.org/ns/history";
|
||||||
private static final String NS_BULK_URL = "http://www.orcid.org/ns/bulk";
|
private static final String NS_EMPLOYMENT = "employment";
|
||||||
private static final String NS_BULK = "bulk";
|
private static final String NS_EMPLOYMENT_URL = "http://www.orcid.org/ns/employment";
|
||||||
private static final String NS_EXTERNAL = "external-identifier";
|
private static final String NS_EXTERNAL = "external-identifier";
|
||||||
private static final String NS_EXTERNAL_URL = "http://www.orcid.org/ns/external-identifier";
|
private static final String NS_EXTERNAL_URL = "http://www.orcid.org/ns/external-identifier";
|
||||||
|
|
||||||
|
@ -61,6 +65,7 @@ public class OrcidParser {
|
||||||
ap.declareXPathNameSpace(NS_WORK, NS_WORK_URL);
|
ap.declareXPathNameSpace(NS_WORK, NS_WORK_URL);
|
||||||
ap.declareXPathNameSpace(NS_EXTERNAL, NS_EXTERNAL_URL);
|
ap.declareXPathNameSpace(NS_EXTERNAL, NS_EXTERNAL_URL);
|
||||||
ap.declareXPathNameSpace(NS_ACTIVITIES, NS_ACTIVITIES_URL);
|
ap.declareXPathNameSpace(NS_ACTIVITIES, NS_ACTIVITIES_URL);
|
||||||
|
ap.declareXPathNameSpace(NS_EMPLOYMENT, NS_EMPLOYMENT_URL);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Author parseSummary(final String xml) {
|
public Author parseSummary(final String xml) {
|
||||||
|
@ -70,13 +75,15 @@ public class OrcidParser {
|
||||||
generateParsedDocument(xml);
|
generateParsedDocument(xml);
|
||||||
List<VtdUtilityParser.Node> recordNodes = VtdUtilityParser
|
List<VtdUtilityParser.Node> recordNodes = VtdUtilityParser
|
||||||
.getTextValuesWithAttributes(
|
.getTextValuesWithAttributes(
|
||||||
ap, vn, "//record:record", Arrays.asList("path"));
|
ap, vn, "//record:record", Collections.singletonList("path"));
|
||||||
if (!recordNodes.isEmpty()) {
|
if (!recordNodes.isEmpty()) {
|
||||||
final String oid = (recordNodes.get(0).getAttributes().get("path")).substring(1);
|
final String oid = (recordNodes.get(0).getAttributes().get("path")).substring(1);
|
||||||
author.setOrcid(oid);
|
author.setOrcid(oid);
|
||||||
} else {
|
} else {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
final String ltm = VtdUtilityParser.getSingleValue(ap, vn, "//common:last-modified-date");
|
||||||
|
author.setLastModifiedDate(ltm);
|
||||||
List<VtdUtilityParser.Node> personNodes = VtdUtilityParser
|
List<VtdUtilityParser.Node> personNodes = VtdUtilityParser
|
||||||
.getTextValuesWithAttributes(
|
.getTextValuesWithAttributes(
|
||||||
ap, vn, "//person:name", Arrays.asList("visibility"));
|
ap, vn, "//person:name", Arrays.asList("visibility"));
|
||||||
|
@ -129,6 +136,64 @@ public class OrcidParser {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public List<Work> parseWorks(final String xml) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
String oid;
|
||||||
|
|
||||||
|
generateParsedDocument(xml);
|
||||||
|
List<VtdUtilityParser.Node> workNodes = VtdUtilityParser
|
||||||
|
.getTextValuesWithAttributes(ap, vn, "//activities:works", Arrays.asList("path", "visibility"));
|
||||||
|
if (!workNodes.isEmpty()) {
|
||||||
|
oid = (workNodes.get(0).getAttributes().get("path")).split("/")[1];
|
||||||
|
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
final List<Work> works = new ArrayList<>();
|
||||||
|
ap.selectXPath("//work:work-summary");
|
||||||
|
|
||||||
|
while (ap.evalXPath() != -1) {
|
||||||
|
final Work work = new Work();
|
||||||
|
work.setOrcid(oid);
|
||||||
|
final AutoPilot ap1 = new AutoPilot(ap.getNav());
|
||||||
|
ap1.selectXPath("./work:title/common:title");
|
||||||
|
while (ap1.evalXPath() != -1) {
|
||||||
|
int it = vn.getText();
|
||||||
|
work.setTitle(vn.toNormalizedString(it));
|
||||||
|
}
|
||||||
|
ap1.selectXPath(".//common:external-id");
|
||||||
|
while (ap1.evalXPath() != -1) {
|
||||||
|
final Pid pid = new Pid();
|
||||||
|
|
||||||
|
final AutoPilot ap2 = new AutoPilot(ap1.getNav());
|
||||||
|
|
||||||
|
ap2.selectXPath("./common:external-id-type");
|
||||||
|
while (ap2.evalXPath() != -1) {
|
||||||
|
int it = vn.getText();
|
||||||
|
pid.setSchema(vn.toNormalizedString(it));
|
||||||
|
}
|
||||||
|
ap2.selectXPath("./common:external-id-value");
|
||||||
|
while (ap2.evalXPath() != -1) {
|
||||||
|
int it = vn.getText();
|
||||||
|
pid.setValue(vn.toNormalizedString(it));
|
||||||
|
}
|
||||||
|
|
||||||
|
work.addPid(pid);
|
||||||
|
}
|
||||||
|
|
||||||
|
works.add(work);
|
||||||
|
}
|
||||||
|
return works;
|
||||||
|
|
||||||
|
} catch (Throwable e) {
|
||||||
|
log.error("Error on parsing {}", xml);
|
||||||
|
log.error(e.getMessage());
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
public Work parseWork(final String xml) {
|
public Work parseWork(final String xml) {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -176,11 +241,15 @@ public class OrcidParser {
|
||||||
}
|
}
|
||||||
|
|
||||||
private String extractEmploymentDate(final String xpath) throws Exception {
|
private String extractEmploymentDate(final String xpath) throws Exception {
|
||||||
|
return extractEmploymentDate(xpath, ap);
|
||||||
|
}
|
||||||
|
|
||||||
ap.selectXPath(xpath);
|
private String extractEmploymentDate(final String xpath, AutoPilot pp) throws Exception {
|
||||||
|
|
||||||
|
pp.selectXPath(xpath);
|
||||||
StringBuilder sb = new StringBuilder();
|
StringBuilder sb = new StringBuilder();
|
||||||
while (ap.evalXPath() != -1) {
|
while (pp.evalXPath() != -1) {
|
||||||
final AutoPilot ap1 = new AutoPilot(ap.getNav());
|
final AutoPilot ap1 = new AutoPilot(pp.getNav());
|
||||||
ap1.selectXPath("./common:year");
|
ap1.selectXPath("./common:year");
|
||||||
while (ap1.evalXPath() != -1) {
|
while (ap1.evalXPath() != -1) {
|
||||||
int it = vn.getText();
|
int it = vn.getText();
|
||||||
|
@ -203,6 +272,104 @@ public class OrcidParser {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public List<Employment> parseEmployments(final String xml) {
|
||||||
|
try {
|
||||||
|
String oid;
|
||||||
|
Map<String, String> nsContext = getNameSpaceMap();
|
||||||
|
DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);
|
||||||
|
		Document doc = DocumentHelper.parseText(xml);

		oid = doc.valueOf("//activities:employments/@path");
		if (oid == null || StringUtils.isEmpty(oid))
			return null;
		final String orcid = oid.split("/")[1];

		List<Node> nodes = doc.selectNodes("//employment:employment-summary");
		return nodes.stream().map(n -> {
			final Employment e = new Employment();
			e.setOrcid(orcid);

			final String depName = n.valueOf(".//common:department-name");
			if (StringUtils.isNotBlank(depName))
				e.setDepartmentName(depName);
			final String roleTitle = n.valueOf(".//common:role-title");
			e.setRoleTitle(roleTitle);
			final String organizationName = n.valueOf(".//common:organization/common:name");
			if (StringUtils.isEmpty(e.getDepartmentName()))
				e.setDepartmentName(organizationName);

			final Pid p = new Pid();
			final String pid = n
				.valueOf(
					"./common:organization/common:disambiguated-organization/common:disambiguated-organization-identifier");
			p.setValue(pid);
			final String pidType = n
				.valueOf("./common:organization/common:disambiguated-organization/common:disambiguation-source");
			p.setSchema(pidType);
			e.setAffiliationId(p);

			// dates are assembled as YYYY-MM-DD, defaulting month and day to "01" when missing;
			// a dedicated builder per date: the start date is stored via setStartDate
			// (assumed to exist alongside setEndDate)
			final StringBuilder sDate = new StringBuilder();
			final String sy = n.valueOf("./common:start-date/common:year");
			if (StringUtils.isNotBlank(sy)) {
				sDate.append(sy);
				final String sm = n.valueOf("./common:start-date/common:month");
				final String sd = n.valueOf("./common:start-date/common:day");
				sDate.append("-");
				if (StringUtils.isNotBlank(sm))
					sDate.append(sm);
				else
					sDate.append("01");
				sDate.append("-");
				if (StringUtils.isNotBlank(sd))
					sDate.append(sd);
				else
					sDate.append("01");
				e.setStartDate(sDate.toString());
			}

			final StringBuilder eDate = new StringBuilder();
			final String ey = n.valueOf("./common:end-date/common:year");
			if (StringUtils.isNotBlank(ey)) {
				eDate.append(ey);
				final String em = n.valueOf("./common:end-date/common:month");
				final String ed = n.valueOf("./common:end-date/common:day");
				eDate.append("-");
				if (StringUtils.isNotBlank(em))
					eDate.append(em);
				else
					eDate.append("01");
				eDate.append("-");
				if (StringUtils.isNotBlank(ed))
					eDate.append(ed);
				else
					eDate.append("01");
				e.setEndDate(eDate.toString());
			}

			return e;
		}).collect(Collectors.toList());
	} catch (Throwable e) {
		log.error("Error on parsing {}", xml);
		log.error(e.getMessage());
		return null;
	}
}

@NotNull
private static Map<String, String> getNameSpaceMap() {
	Map<String, String> nsContext = new HashMap<>();
	nsContext.put(NS_COMMON, NS_COMMON_URL);
	nsContext.put(NS_PERSON, NS_PERSON_URL);
	nsContext.put(NS_DETAILS, NS_DETAILS_URL);
	nsContext.put(NS_OTHER, NS_OTHER_URL);
	nsContext.put(NS_RECORD, NS_RECORD_URL);
	nsContext.put(NS_ERROR, NS_ERROR_URL);
	nsContext.put(NS_HISTORY, NS_HISTORY_URL);
	nsContext.put(NS_WORK, NS_WORK_URL);
	nsContext.put(NS_EXTERNAL, NS_EXTERNAL_URL);
	nsContext.put(NS_ACTIVITIES, NS_ACTIVITIES_URL);
	nsContext.put(NS_EMPLOYMENT, NS_EMPLOYMENT_URL);
	return nsContext;
}
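The prefixed XPath expressions above (for example "//employment:employment-summary") only resolve if the prefix-to-URI bindings are registered with dom4j; a minimal sketch of the presumed wiring, which is not shown in this diff:

	// assumption: the namespace map is installed on the default DocumentFactory
	// before parsing, so that doc.valueOf(...) can use the prefixes
	final org.dom4j.DocumentFactory factory = org.dom4j.DocumentFactory.getInstance();
	factory.setXPathNamespaceURIs(getNameSpaceMap());
	final Document doc = DocumentHelper.parseText(xml);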
public Employment parseEmployment(final String xml) {
	try {
		final Employment employment = new Employment();
@ -18,6 +18,8 @@ public class Author extends ORCIDItem {

	private String biography;

+	private String lastModifiedDate;
+
	public String getBiography() {
		return biography;
	}

@ -74,6 +76,14 @@ public class Author extends ORCIDItem {
		this.otherPids = otherPids;
	}

+	public String getLastModifiedDate() {
+		return lastModifiedDate;
+	}
+
+	public void setLastModifiedDate(String lastModifiedDate) {
+		this.lastModifiedDate = lastModifiedDate;
+	}
+
	public void addOtherPid(final Pid pid) {

		if (otherPids == null)
@ -10,7 +10,8 @@ import eu.dnetlib.dhp.common.collection.CollectorException;
public interface CollectorPlugin {

	enum NAME {
-		oai, other, rest_json2xml, file, fileGzip;
+
+		oai, other, rest_json2xml, file, fileGzip, baseDump;

		public enum OTHER_NAME {
			mdstore_mongodb_dump, mdstore_mongodb
@ -0,0 +1,171 @@

package eu.dnetlib.dhp.collection.plugin.base;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.StringWriter;
import java.util.Iterator;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.common.aggregation.AggregatorReport;

public class BaseCollectorIterator implements Iterator<String> {

	private String nextElement;

	private final BlockingQueue<String> queue = new LinkedBlockingQueue<>(100);

	private static final Logger log = LoggerFactory.getLogger(BaseCollectorIterator.class);

	private static final String END_ELEM = "__END__";

	public BaseCollectorIterator(final FileSystem fs, final Path filePath, final AggregatorReport report) {
		new Thread(() -> importHadoopFile(fs, filePath, report)).start();
		try {
			this.nextElement = this.queue.take();
		} catch (final InterruptedException e) {
			throw new RuntimeException(e);
		}
	}

	protected BaseCollectorIterator(final String resourcePath, final AggregatorReport report) {
		new Thread(() -> importTestFile(resourcePath, report)).start();
		try {
			this.nextElement = this.queue.take();
		} catch (final InterruptedException e) {
			throw new RuntimeException(e);
		}
	}

	@Override
	public synchronized boolean hasNext() {
		return (this.nextElement != null) && !END_ELEM.equals(this.nextElement);
	}

	@Override
	public synchronized String next() {
		try {
			return END_ELEM.equals(this.nextElement) ? null : this.nextElement;
		} finally {
			try {
				this.nextElement = this.queue.take();
			} catch (final InterruptedException e) {
				throw new RuntimeException(e);
			}
		}
	}

	private void importHadoopFile(final FileSystem fs, final Path filePath, final AggregatorReport report) {
		log.info("I start to read the TAR stream");

		try (InputStream origInputStream = fs.open(filePath);
			final TarArchiveInputStream tarInputStream = new TarArchiveInputStream(origInputStream)) {
			importTarStream(tarInputStream, report);
		} catch (final Throwable e) {
			throw new RuntimeException("Error processing BASE records", e);
		}
	}

	private void importTestFile(final String resourcePath, final AggregatorReport report) {
		try (final InputStream origInputStream = BaseCollectorIterator.class.getResourceAsStream(resourcePath);
			final TarArchiveInputStream tarInputStream = new TarArchiveInputStream(origInputStream)) {
			importTarStream(tarInputStream, report);
		} catch (final Throwable e) {
			throw new RuntimeException("Error processing BASE records", e);
		}
	}

	private void importTarStream(final TarArchiveInputStream tarInputStream, final AggregatorReport report) {
		long count = 0;

		final XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();
		final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newInstance();

		try {
			TarArchiveEntry entry;
			while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
				final String name = entry.getName();

				if (!entry.isDirectory() && name.contains("ListRecords") && name.endsWith(".bz2")) {

					log.info("Processing file (BZIP): " + name);

					final byte[] bzipData = new byte[(int) entry.getSize()];
					IOUtils.readFully(tarInputStream, bzipData);

					try (InputStream bzipIs = new ByteArrayInputStream(bzipData);
						final BufferedInputStream bzipBis = new BufferedInputStream(bzipIs);
						final CompressorInputStream bzipInput = new CompressorStreamFactory()
							.createCompressorInputStream(bzipBis)) {

						final XMLEventReader reader = xmlInputFactory.createXMLEventReader(bzipInput);

						XMLEventWriter eventWriter = null;
						StringWriter xmlWriter = null;

						while (reader.hasNext()) {
							final XMLEvent nextEvent = reader.nextEvent();

							if (nextEvent.isStartElement()) {
								final StartElement startElement = nextEvent.asStartElement();
								if ("record".equals(startElement.getName().getLocalPart())) {
									xmlWriter = new StringWriter();
									eventWriter = xmlOutputFactory.createXMLEventWriter(xmlWriter);
								}
							}

							if (eventWriter != null) {
								eventWriter.add(nextEvent);
							}

							if (nextEvent.isEndElement()) {
								final EndElement endElement = nextEvent.asEndElement();
								if ("record".equals(endElement.getName().getLocalPart())) {
									eventWriter.flush();
									eventWriter.close();

									this.queue.put(xmlWriter.toString());

									eventWriter = null;
									xmlWriter = null;
									count++;
								}
							}

						}
					}
				}
			}

			this.queue.put(END_ELEM); // TO INDICATE THE END OF THE QUEUE
		} catch (final Throwable e) {
			log.error("Error processing BASE records", e);
			report.put(e.getClass().getName(), e.getMessage());
			throw new RuntimeException("Error processing BASE records", e);
		} finally {
			log.info("Total records (written in queue): " + count);
		}
	}

}
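A minimal usage sketch of the iterator: parsing runs on a background thread that fills the bounded queue, while the caller drains it through the standard Iterator contract. The HDFS path below is a made-up example, and the report construction is simplified:

	// consume the records extracted from the BASE dump (illustrative path)
	final FileSystem fs = FileSystem.get(new org.apache.hadoop.conf.Configuration());
	final Iterator<String> it = new BaseCollectorIterator(fs, new Path("/data/base/dump.tar"), new AggregatorReport());
	while (it.hasNext()) {
		final String recordXml = it.next(); // one serialized <record> element per call
	}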
@ -0,0 +1,159 @@

package eu.dnetlib.dhp.collection.plugin.base;

import java.io.IOException;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Optional;
import java.util.Set;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.collection.ApiDescriptor;
import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
import eu.dnetlib.dhp.common.DbClient;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
import eu.dnetlib.dhp.common.collection.CollectorException;

public class BaseCollectorPlugin implements CollectorPlugin {

	private final FileSystem fs;

	private static final Logger log = LoggerFactory.getLogger(BaseCollectorPlugin.class);

	// MAPPING AND FILTERING ARE DEFINED HERE:
	// https://docs.google.com/document/d/1Aj-ZAV11b44MCrAAUCPiS2TUlXb6PnJEu1utCMAcCOU/edit

	public BaseCollectorPlugin(final FileSystem fs) {
		this.fs = fs;
	}

	@Override
	public Stream<String> collect(final ApiDescriptor api, final AggregatorReport report) throws CollectorException {
		// the path of the dump file on HDFS
		// http://oai.base-search.net/initial_load/base_oaipmh_dump-current.tar
		// it could be downloaded from iis-cdh5-test-gw.ocean.icm.edu.pl and then copied on HDFS
		final Path filePath = Optional
			.ofNullable(api.getBaseUrl())
			.map(Path::new)
			.orElseThrow(() -> new CollectorException("missing baseUrl"));

		// the parameters for the connection to the OpenAIRE database:
		// the database is used to obtain the list of the datasources that the plugin will collect
		final String dbUrl = api.getParams().get("dbUrl");
		final String dbUser = api.getParams().get("dbUser");
		final String dbPassword = api.getParams().get("dbPassword");

		// the types (comma separated, empty value for all) that the plugin will collect,
		// expressed in the format of the normalized types of BASE (for example 1,121,...)
		final String acceptedNormTypesString = api.getParams().get("acceptedNormTypes");

		log.info("baseUrl: {}", filePath);
		log.info("dbUrl: {}", dbUrl);
		log.info("dbUser: {}", dbUser);
		log.info("dbPassword: {}", "***");
		log.info("acceptedNormTypes: {}", acceptedNormTypesString);

		try {
			if (!this.fs.exists(filePath)) {
				throw new CollectorException("path does not exist: " + filePath);
			}
		} catch (final Throwable e) {
			throw new CollectorException(e);
		}

		final Set<String> acceptedOpendoarIds = findAcceptedOpendoarIds(dbUrl, dbUser, dbPassword);

		final Set<String> acceptedNormTypes = new HashSet<>();
		if (StringUtils.isNotBlank(acceptedNormTypesString)) {
			for (final String s : StringUtils.split(acceptedNormTypesString, ",")) {
				if (StringUtils.isNotBlank(s)) {
					acceptedNormTypes.add(s.trim());
				}
			}
		}

		final Iterator<String> iterator = new BaseCollectorIterator(this.fs, filePath, report);
		final Spliterator<String> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
		return StreamSupport
			.stream(spliterator, false)
			.filter(doc -> filterXml(doc, acceptedOpendoarIds, acceptedNormTypes));
	}

	private Set<String> findAcceptedOpendoarIds(final String dbUrl, final String dbUser, final String dbPassword)
		throws CollectorException {
		final Set<String> accepted = new HashSet<>();

		try (final DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {

			final String sql = IOUtils
				.toString(
					getClass().getResourceAsStream("/eu/dnetlib/dhp/collection/plugin/base/sql/opendoar-accepted.sql"));

			dbClient.processResults(sql, row -> {
				try {
					final String dsId = row.getString("id");
					log.info("Accepted Datasource: " + dsId);
					accepted.add(dsId);
				} catch (final SQLException e) {
					log.error("Error in SQL", e);
					throw new RuntimeException("Error in SQL", e);
				}
			});

		} catch (final IOException e) {
			log.error("Error accessing SQL", e);
			throw new CollectorException("Error accessing SQL", e);
		}

		log.info("Accepted Datasources (TOTAL): " + accepted.size());

		return accepted;
	}

	protected static boolean filterXml(final String xml,
		final Set<String> acceptedOpendoarIds,
		final Set<String> acceptedNormTypes) {
		try {

			final Document doc = DocumentHelper.parseText(xml);

			final String id = doc.valueOf("//*[local-name()='collection']/@opendoar_id").trim();

			if (StringUtils.isBlank(id) || !acceptedOpendoarIds.contains("opendoar____::" + id)) {
				return false;
			}

			if (acceptedNormTypes.isEmpty()) {
				return true;
			}

			for (final Object s : doc.selectNodes("//*[local-name()='typenorm']")) {
				if (acceptedNormTypes.contains(((Node) s).getText().trim())) {
					return true;
				}
			}

			return false;
		} catch (final DocumentException e) {
			log.error("Error parsing document", e);
			throw new RuntimeException("Error parsing document", e);
		}
	}

}
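A hedged illustration of the filterXml logic, callable from a test in the same package; the record fragment is invented and namespaces are omitted for brevity:

	// expected: true, as both the opendoar id and the normalized type are accepted
	final String record = "<record><collection opendoar_id=\"123\"/><typenorm>121</typenorm></record>";
	final boolean accepted = BaseCollectorPlugin
		.filterXml(record, java.util.Collections.singleton("opendoar____::123"), java.util.Collections.singleton("121"));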
@ -52,8 +52,6 @@ public class RestIterator implements Iterator<String> {

	private final String BASIC = "basic";

-	private final JsonUtils jsonUtils;
-
	private final String baseUrl;
	private final String resumptionType;
	private final String resumptionParam;

@ -106,7 +104,6 @@ public class RestIterator implements Iterator<String> {
		final String resultOutputFormat) {

		this.clientParams = clientParams;
-		this.jsonUtils = new JsonUtils();
		this.baseUrl = baseUrl;
		this.resumptionType = resumptionType;
		this.resumptionParam = resumptionParam;

@ -126,6 +123,7 @@ public class RestIterator implements Iterator<String> {
		} catch (Exception e) {
			throw new IllegalStateException("xml transformation init failed: " + e.getMessage());
		}
+
		initQueue();
	}

@ -190,7 +188,7 @@ public class RestIterator implements Iterator<String> {
		String resultJson;
		String resultXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
		String nextQuery = "";
-		String emptyXml = resultXml + "<" + JsonUtils.wrapName + "></" + JsonUtils.wrapName + ">";
+		String emptyXml = resultXml + "<" + JsonUtils.XML_WRAP_TAG + "></" + JsonUtils.XML_WRAP_TAG + ">";
		Node resultNode = null;
		NodeList nodeList = null;
		String qUrlArgument = "";

@ -231,7 +229,7 @@ public class RestIterator implements Iterator<String> {
			resultStream = theHttpInputStream;
			if ("json".equals(resultOutputFormat)) {
				resultJson = IOUtils.toString(resultStream, StandardCharsets.UTF_8);
-				resultXml = jsonUtils.convertToXML(resultJson);
+				resultXml = JsonUtils.convertToXML(resultJson);
				resultStream = IOUtils.toInputStream(resultXml, UTF_8);
			}
@ -3,82 +3,142 @@ package eu.dnetlib.dhp.collection.plugin.utils;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.json.JSONArray;
+import org.json.JSONObject;

public class JsonUtils {

+	public static final String XML_WRAP_TAG = "recordWrap";
+	private static final String XML_HEADER = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
+	private static final String INVALID_XMLTAG_CHARS = "!\"#$%&'()*+,/;<=>?@[\\]^`{|}~,";
+
	private static final Log log = LogFactory.getLog(JsonUtils.class);

-	public static final String wrapName = "recordWrap";
-
	/**
-	 * convert in JSON-KeyName 'whitespace(s)' to '_' and '/' to '_', '(' and ')' to ''
+	 * cleanup of a JSON-KeyName:
	 * check W3C XML syntax: https://www.w3.org/TR/2006/REC-xml11-20060816/#sec-starttags for valid tag names
	 * and work-around for the JSON to XML converting of org.json.XML-package.
	 *
-	 * known bugs: doesn't prevent "key name":" ["sexy name",": penari","erotic dance"],
-	 *
-	 * @param jsonInput
-	 * @return convertedJsonKeynameOutput
+	 * @param input
+	 * @return converted json object
	 */
-	public String syntaxConvertJsonKeyNames(String jsonInput) {
-
-		log.trace("before convertJsonKeyNames: " + jsonInput);
-		// pre-clean json - rid spaces of element names (misinterpreted as elements with attributes in xml)
-		// replace ' 's in JSON Names with '_'
-		while (jsonInput.matches(".*\"([^\"]*)\\s+([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*)\\s+([^\"]*)\":", "\"$1_$2\":");
-		}
-
-		// replace forward-slash (sign '/') in JSON Names with '_'
-		while (jsonInput.matches(".*\"([^\"]*)/([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*)/([^\"]*)\":", "\"$1_$2\":");
-		}
-
-		// replace '(' in JSON Names with ''
-		while (jsonInput.matches(".*\"([^\"]*)[(]([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*)[(]([^\"]*)\":", "\"$1$2\":");
-		}
-
-		// replace ')' in JSON Names with ''
-		while (jsonInput.matches(".*\"([^\"]*)[)]([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*)[)]([^\"]*)\":", "\"$1$2\":");
-		}
-
-		// add prefix 'n_' to JSON Keynames that start with a number
-		while (jsonInput.matches(".*\"([^\"][0-9])([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"][0-9])([^\"]*)\":", "\"n_$1$2\":");
-		}
-
-		// add prefix 'm_' to JSON Keynames that consist only of numbers
-		while (jsonInput.matches(".*\"([0-9]+)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([0-9]+)\":", "\"m_$1\":");
-		}
-
-		// remove ':' between numbers, like in '2018-08-28T11:05:00Z', in JSON keynames
-		while (jsonInput.matches(".*\"([^\"]*[0-9]):([0-9][^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*[0-9]):([0-9][^\"]*)\":", "\"$1$2\":");
-		}
-
-		// replace ',' in JSON Keynames with '.' to prevent ',' in xml tagnames.
-		// while (jsonInput.matches(".*\"([^\"]*),([^\"]*)\":.*")) {
-		// jsonInput = jsonInput.replaceAll("\"([^\"]*),([^\"]*)\":", "\"$1.$2\":");
-		// }
-
-		// replace '=' in JSON Keynames with '-'
-		while (jsonInput.matches(".*\"([^\"]*)=([^\"]*)\":.*")) {
-			jsonInput = jsonInput.replaceAll("\"([^\"]*)=([^\"]*)\":", "\"$1-$2\":");
-		}
-
-		log.trace("after syntaxConvertJsonKeyNames: " + jsonInput);
-		return jsonInput;
-	}
-
-	public String convertToXML(final String jsonRecord) {
-		String resultXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
-		org.json.JSONObject jsonObject = new org.json.JSONObject(syntaxConvertJsonKeyNames(jsonRecord));
-		resultXml += org.json.XML.toString(jsonObject, wrapName); // wrap xml in single root element
-		log.trace("before inputStream: " + resultXml);
-		resultXml = XmlCleaner.cleanAllEntities(resultXml);
-		log.trace("after cleaning: " + resultXml);
-		return resultXml;
-	}
+	public static JSONObject cleanJsonObject(final JSONObject input) {
+		if (null == input) {
+			return null;
+		}
+
+		JSONObject result = new JSONObject();
+
+		for (String key : input.keySet()) {
+			Object value = input.opt(key);
+			if (value != null) {
+				result.put(cleanKey(key), cleanValue(value));
+			}
+		}
+
+		return result;
+	}
+
+	private static Object cleanValue(Object object) {
+		if (object instanceof JSONObject) {
+			return cleanJsonObject((JSONObject) object);
+		} else if (object instanceof JSONArray) {
+			JSONArray array = (JSONArray) object;
+			JSONArray res = new JSONArray();
+
+			for (int i = array.length() - 1; i >= 0; i--) {
+				res.put(i, cleanValue(array.opt(i)));
+			}
+			return res;
+		} else if (object instanceof String) {
+			String value = (String) object;
+
+			// XML 1.0 Allowed characters
+			// Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+			return value
+				.codePoints()
+				.filter(
+					cp -> cp == 0x9 || cp == 0xA || cp == 0xD || (cp >= 0x20 && cp <= 0xD7FF)
+						|| (cp >= 0xE000 && cp <= 0xFFFD)
+						|| (cp >= 0x10000 && cp <= 0x10FFFF))
+				.collect(
+					StringBuilder::new,
+					StringBuilder::appendCodePoint,
+					StringBuilder::append)
+				.toString();
+		}
+
+		return object;
+	}
+
+	private static String cleanKey(String key) {
+		if (key == null || key.isEmpty()) {
+			return key;
+		}
+
+		// an xml tag cannot begin with "-", ".", or a numeric digit
+		switch (key.charAt(0)) {
+			case '-':
+			case '.':
+				key = "_" + key.substring(1);
+				break;
+		}
+
+		if (Character.isDigit(key.charAt(0))) {
+			if (key.matches("^[0-9]+$")) {
+				// add prefix 'm_' to keynames that consist only of numbers
+				key = "m_" + key;
+			} else {
+				// add prefix 'n_' to keynames that start with a number
+				key = "n_" + key;
+			}
+		}
+
+		StringBuilder res = new StringBuilder(key.length());
+		for (int i = 0; i < key.length(); i++) {
+			char c = key.charAt(i);
+
+			// sequences of whitespace are rendered as a single '_'
+			if (Character.isWhitespace(c)) {
+				while (i + 1 < key.length() && Character.isWhitespace(key.charAt(i + 1))) {
+					i++;
+				}
+				res.append('_');
+			}
+			// drop the invalid chars for xml tags, with the exception of '=' and '/'
+			else if (INVALID_XMLTAG_CHARS.indexOf(c) >= 0) {
+				switch (c) {
+					case '=':
+						res.append('-');
+						break;
+					case '/':
+						res.append('_');
+						break;
+					default:
+						// nothing is appended: the character is dropped
+						break;
+				}
+			}
+			// all other chars are kept
+			else {
+				res.append(c);
+			}
+		}
+
+		return res.toString();
+	}
+
+	public static String convertToXML(final String jsonRecord) {
+		if (log.isTraceEnabled()) {
+			log.trace("input json: " + jsonRecord);
+		}
+
+		JSONObject jsonObject = cleanJsonObject(new org.json.JSONObject(jsonRecord));
+		String res = XML_HEADER + org.json.XML.toString(jsonObject, XML_WRAP_TAG); // wrap xml in single root element
+
+		if (log.isTraceEnabled()) {
+			log.trace("output xml: " + res);
+		}
+		return res;
+	}
}
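A small, hedged illustration of the rewritten conversion; the input is invented and the exact serialization is delegated to org.json.XML, but the key renaming follows cleanKey as shown above:

	// keys with spaces, slashes and all-digit names get normalized before the XML conversion
	final String json = "{\"creator name\":\"Jane\",\"date/issued\":\"2018\",\"123\":\"x\"}";
	final String xml = JsonUtils.convertToXML(json);
	// expected tags: <creator_name>, <date_issued> and <m_123>, wrapped in a single <recordWrap> root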
@ -48,7 +48,7 @@ public class XSLTTransformationFunction implements MapFunction<MetadataRecord, MetadataRecord> {
	@Override
	public MetadataRecord call(MetadataRecord value) {
		aggregationCounter.getTotalItems().add(1);
-		try {
+
		Processor processor = new Processor(false);

		processor.registerExtensionFunction(cleanFunction);

@ -60,11 +60,18 @@ public class XSLTTransformationFunction implements MapFunction<MetadataRecord, MetadataRecord> {
		comp.setParameter(datasourceIDParam, new XdmAtomicValue(value.getProvenance().getDatasourceId()));
		QName datasourceNameParam = new QName(DATASOURCE_NAME_PARAM);
		comp.setParameter(datasourceNameParam, new XdmAtomicValue(value.getProvenance().getDatasourceName()));
-		XsltExecutable xslt = comp
-			.compile(new StreamSource(IOUtils.toInputStream(transformationRule, StandardCharsets.UTF_8)));
-		XdmNode source = processor
-			.newDocumentBuilder()
-			.build(new StreamSource(IOUtils.toInputStream(value.getBody(), StandardCharsets.UTF_8)));
+		XsltExecutable xslt;
+		XdmNode source;
+		try {
+			xslt = comp
+				.compile(new StreamSource(IOUtils.toInputStream(transformationRule, StandardCharsets.UTF_8)));
+			source = processor
+				.newDocumentBuilder()
+				.build(new StreamSource(IOUtils.toInputStream(value.getBody(), StandardCharsets.UTF_8)));
+		} catch (Throwable e) {
+			throw new RuntimeException("Error on parsing xslt", e);
+		}
+		try {
			XsltTransformer trans = xslt.load();
			trans.setInitialContextNode(source);
			final StringWriter output = new StringWriter();
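For context, a minimal Saxon s9api sketch of the compile-then-transform flow that the hunk above splits into two try blocks, separating stylesheet/source parsing errors from transformation errors; the names are illustrative:

	// compile the stylesheet, build the source document, then run the transformation
	final Processor processor = new Processor(false);
	final XsltExecutable xslt = processor
		.newXsltCompiler()
		.compile(new StreamSource(new StringReader(xsltText)));
	final XdmNode source = processor
		.newDocumentBuilder()
		.build(new StreamSource(new StringReader(recordXml)));
	final XsltTransformer trans = xslt.load();
	trans.setInitialContextNode(source);
	final StringWriter output = new StringWriter();
	trans.setDestination(processor.newSerializer(output));
	trans.transform();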
@ -17,6 +17,12 @@
		"paramDescription": "the path to get the input data from Pubmed",
		"paramRequired": true
	},
+	{
+		"paramName": "oip",
+		"paramLongName": "openapcInputPath",
+		"paramDescription": "the path to get the input data from OpenAPC",
+		"paramRequired": true
+	},
	{
		"paramName": "o",
		"paramLongName": "outputPath",
@ -31,6 +31,7 @@ spark2SqlQueryExecutionListeners=com.cloudera.spark.lineage.NavigatorQueryListener
# The following is needed as a property of a workflow
oozie.wf.application.path=${oozieTopWfApplicationPath}

-crossrefInputPath=/data/bip-affiliations/data.json
+crossrefInputPath=/data/bip-affiliations/crossref-data.json
pubmedInputPath=/data/bip-affiliations/pubmed-data.json
+openapcInputPath=/data/bip-affiliations/openapc-data.json
outputPath=/tmp/crossref-affiliations-output-v5
@ -9,6 +9,10 @@
		<name>pubmedInputPath</name>
		<description>the path where to find the inferred affiliation relations from Pubmed</description>
	</property>
+	<property>
+		<name>openapcInputPath</name>
+		<description>the path where to find the inferred affiliation relations from OpenAPC</description>
+	</property>
	<property>
		<name>outputPath</name>
		<description>the path where to store the actionset</description>

@ -102,6 +106,7 @@
			</spark-opts>
			<arg>--crossrefInputPath</arg><arg>${crossrefInputPath}</arg>
			<arg>--pubmedInputPath</arg><arg>${pubmedInputPath}</arg>
+			<arg>--openapcInputPath</arg><arg>${openapcInputPath}</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
@ -16,5 +16,10 @@
		"paramLongName": "outputPath",
		"paramDescription": "the path of the new ActionSet",
		"paramRequired": true
+	}, {
+		"paramName": "fd",
+		"paramLongName": "distributeDoi",
+		"paramDescription": "the path of the new ActionSet",
+		"paramRequired": false
	}
]
@ -0,0 +1,20 @@
[
	{
		"paramName": "sp",
		"paramLongName": "sourcePath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	}
]
@ -0,0 +1,30 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>hiveMetastoreUris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>hiveJdbcUrl</name>
		<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
	</property>
	<property>
		<name>hiveDbName</name>
		<value>openaire</value>
	</property>
	<property>
		<name>oozie.launcher.mapreduce.user.classpath.first</name>
		<value>true</value>
	</property>
</configuration>
@ -0,0 +1,153 @@

<workflow-app name="FOS no doi" xmlns="uri:oozie:workflow:0.5">
	<parameters>
		<property>
			<name>fosPath</name>
			<description>the input path of the resources to be extended</description>
		</property>
		<property>
			<name>outputPath</name>
			<description>the path where to store the actionset</description>
		</property>
		<property>
			<name>sparkDriverMemory</name>
			<description>memory for driver process</description>
		</property>
		<property>
			<name>sparkExecutorMemory</name>
			<description>memory for individual executor</description>
		</property>
		<property>
			<name>sparkExecutorCores</name>
			<description>number of cores used by single executor</description>
		</property>
		<property>
			<name>oozieActionShareLibForSpark2</name>
			<description>oozie action sharelib for spark 2.*</description>
		</property>
		<property>
			<name>spark2ExtraListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
			<description>spark 2.* extra listeners classname</description>
		</property>
		<property>
			<name>spark2SqlQueryExecutionListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
			<description>spark 2.* sql query execution listeners classname</description>
		</property>
		<property>
			<name>spark2YarnHistoryServerAddress</name>
			<description>spark 2.* yarn history server address</description>
		</property>
		<property>
			<name>spark2EventLogDir</name>
			<description>spark 2.* event log dir location</description>
		</property>
	</parameters>

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapreduce.job.queuename</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.launcher.mapred.job.queue.name</name>
				<value>${oozieLauncherQueueName}</value>
			</property>
			<property>
				<name>oozie.action.sharelib.for.spark</name>
				<value>${oozieActionShareLibForSpark2}</value>
			</property>
		</configuration>
	</global>
	<start to="getFOS"/>

	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>

	<action name="getFOS">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Gets Data from FOS csv file</name>
			<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.GetFOSSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--sourcePath</arg><arg>${fosPath}</arg>
			<arg>--outputPath</arg><arg>${workingDir}/input/fos</arg>
			<arg>--delimiter</arg><arg>${delimiter}</arg>
		</spark>
		<ok to="prepareFos"/>
		<error to="Kill"/>
	</action>

	<action name="prepareFos">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Produces the results from FOS</name>
			<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareFOSSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--sourcePath</arg><arg>${workingDir}/input/fos</arg>
			<arg>--outputPath</arg><arg>${workingDir}/prepared</arg>
			<arg>--distributeDoi</arg><arg>false</arg>
		</spark>
		<ok to="produceActionSet"/>
		<error to="Kill"/>
	</action>

	<action name="produceActionSet">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Save the action set grouping results with the same id</name>
			<class>eu.dnetlib.dhp.actionmanager.fosnodoi.CreateActionSetSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--sourcePath</arg><arg>${workingDir}/prepared/fos</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>

	<end name="End"/>
</workflow-app>
@ -1,13 +1,13 @@
[
	{
-		"paramName": "if",
-		"paramLongName": "inputFile",
+		"paramName": "ip",
+		"paramLongName": "inputPath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
-		"paramName": "wp",
-		"paramLongName": "workingPath",
+		"paramName": "op",
+		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},

@ -16,11 +16,5 @@
		"paramLongName": "hdfsNameNode",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
-	},
-	{
-		"paramName": "p",
-		"paramLongName": "prefix",
-		"paramDescription": "COCI or POCI",
-		"paramRequired": true
	}
]
@ -1,7 +1,7 @@
[
	{
-		"paramName": "wp",
-		"paramLongName": "workingPath",
+		"paramName": "ip",
+		"paramLongName": "inputPath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},

@ -24,15 +24,9 @@
		"paramLongName": "outputPath",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
-	},
-	{
-		"paramName": "if",
-		"paramLongName": "inputFile",
-		"paramDescription": "the hdfs name node",
-		"paramRequired": true
	}, {
-		"paramName": "f",
-		"paramLongName": "format",
+		"paramName": "nn",
+		"paramLongName": "hdfsNameNode",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
	}
@ -27,7 +27,9 @@
			<case to="download">${wf:conf('resumeFrom') eq 'DownloadDump'}</case>
			<case to="extract">${wf:conf('resumeFrom') eq 'ExtractContent'}</case>
			<case to="read">${wf:conf('resumeFrom') eq 'ReadContent'}</case>
-			<default to="create_actionset"/> <!-- first action to be done when downloadDump is to be performed -->
+			<case to="remap">${wf:conf('resumeFrom') eq 'MapContent'}</case>
+			<case to="create_actionset">${wf:conf('resumeFrom') eq 'CreateAS'}</case>
+			<default to="deleteoutputpath"/> <!-- first action to be done when downloadDump is to be performed -->
		</switch>
	</decision>

@ -35,6 +37,15 @@
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>

+	<action name="deleteoutputpath">
+		<fs>
+			<delete path='${inputPath}'/>
+			<mkdir path='${inputPath}'/>
+		</fs>
+		<ok to="download"/>
+		<error to="Kill"/>
+	</action>
+
	<action name="download">
		<shell xmlns="uri:oozie:shell-action:0.2">
			<job-tracker>${jobTracker}</job-tracker>

@ -47,7 +58,28 @@
			</configuration>
			<exec>download.sh</exec>
			<argument>${filelist}</argument>
-			<argument>${workingPath}/${prefix}/Original</argument>
+			<argument>${inputPath}/Original</argument>
+			<env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
+			<file>download.sh</file>
+			<capture-output/>
+		</shell>
+		<ok to="download_correspondence"/>
+		<error to="Kill"/>
+	</action>
+	<!-- downloads the correspondence between the OMID and the PIDs (DOI, PMID etc.) -->
+	<action name="download_correspondence">
+		<shell xmlns="uri:oozie:shell-action:0.2">
+			<job-tracker>${jobTracker}</job-tracker>
+			<name-node>${nameNode}</name-node>
+			<configuration>
+				<property>
+					<name>mapred.job.queue.name</name>
+					<value>${queueName}</value>
+				</property>
+			</configuration>
+			<exec>download_corr.sh</exec>
+			<argument>${filecorrespondence}</argument>
+			<argument>${inputPath}/correspondence</argument>
			<env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
			<file>download.sh</file>
			<capture-output/>

@ -60,9 +92,19 @@
		<java>
			<main-class>eu.dnetlib.dhp.actionmanager.opencitations.GetOpenCitationsRefs</main-class>
			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
-			<arg>--inputFile</arg><arg>${inputFile}</arg>
-			<arg>--workingPath</arg><arg>${workingPath}/${prefix}</arg>
-			<arg>--prefix</arg><arg>${prefix}</arg>
+			<arg>--inputPath</arg><arg>${inputPath}/Original</arg>
+			<arg>--outputPath</arg><arg>${inputPath}/Extracted</arg>
+		</java>
+		<ok to="read"/>
+		<error to="Kill"/>
+	</action>
+
+	<action name="extract_correspondence">
+		<java>
+			<main-class>eu.dnetlib.dhp.actionmanager.opencitations.GetOpenCitationsRefs</main-class>
+			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
+			<arg>--inputPath</arg><arg>${inputPath}/correspondence</arg>
+			<arg>--outputPath</arg><arg>${inputPath}/correspondence_extracted</arg>
		</java>
		<ok to="read"/>
		<error to="Kill"/>

@ -85,11 +127,35 @@
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
-			<arg>--workingPath</arg><arg>${workingPath}/${prefix}/${prefix}</arg>
-			<arg>--outputPath</arg><arg>${workingPath}/${prefix}/${prefix}_JSON/</arg>
+			<arg>--inputPath</arg><arg>${inputPath}/Extracted</arg>
+			<arg>--outputPath</arg><arg>${inputPath}/JSON</arg>
			<arg>--delimiter</arg><arg>${delimiter}</arg>
-			<arg>--inputFile</arg><arg>${inputFileCoci}</arg>
-			<arg>--format</arg><arg>${prefix}</arg>
+			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
+		</spark>
+		<ok to="remap"/>
+		<error to="Kill"/>
+	</action>
+
+	<action name="remap">
+		<spark xmlns="uri:oozie:spark-action:0.2">
+			<master>yarn</master>
+			<mode>cluster</mode>
+			<name>Produces the AS for OC</name>
+			<class>eu.dnetlib.dhp.actionmanager.opencitations.MapOCIdsInPids</class>
+			<jar>dhp-aggregation-${projectVersion}.jar</jar>
+			<spark-opts>
+				--executor-memory=${sparkExecutorMemory}
+				--executor-cores=${sparkExecutorCores}
+				--driver-memory=${sparkDriverMemory}
+				--conf spark.extraListeners=${spark2ExtraListeners}
+				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+			</spark-opts>
+			<arg>--inputPath</arg><arg>${inputPath}</arg>
+			<arg>--outputPath</arg><arg>${outputPathExtraction}</arg>
+			<arg>--nameNode</arg><arg>${nameNode}</arg>
		</spark>
		<ok to="create_actionset"/>
		<error to="Kill"/>

@ -112,7 +178,7 @@
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
-			<arg>--inputPath</arg><arg>${workingPath}</arg>
+			<arg>--inputPath</arg><arg>${outputPathExtraction}</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
@ -0,0 +1,25 @@
[
	{
		"paramName": "ip",
		"paramLongName": "inputPath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	},
	{
		"paramName": "nn",
		"paramLongName": "nameNode",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
	}
]
@ -0,0 +1,20 @@
[
	{
		"paramName": "ip",
		"paramLongName": "inputPath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	}
]
@ -0,0 +1,30 @@
[
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	},
	{
		"paramName": "d",
		"paramLongName": "delimiter",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
	},
	{
		"paramName": "if",
		"paramLongName": "inputFile",
		"paramDescription": "the hdfs name node",
		"paramRequired": true
	}
]
@ -0,0 +1,58 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>oozie.action.sharelib.for.spark</name>
		<value>spark2</value>
	</property>
	<property>
		<name>hive_metastore_uris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>spark2YarnHistoryServerAddress</name>
		<value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
	</property>
	<property>
		<name>spark2ExtraListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
	</property>
	<property>
		<name>spark2SqlQueryExecutionListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
	</property>
	<property>
		<name>oozie.launcher.mapreduce.user.classpath.first</name>
		<value>true</value>
	</property>
	<property>
		<name>sparkExecutorNumber</name>
		<value>4</value>
	</property>
	<property>
		<name>spark2EventLogDir</name>
		<value>/user/spark/spark2ApplicationHistory</value>
	</property>
	<property>
		<name>sparkDriverMemory</name>
		<value>15G</value>
	</property>
	<property>
		<name>sparkExecutorMemory</name>
		<value>6G</value>
	</property>
	<property>
		<name>sparkExecutorCores</name>
		<value>1</value>
	</property>
</configuration>
@ -0,0 +1,2 @@
#!/bin/bash
curl -L $1 | hdfs dfs -put - $2
@ -0,0 +1,82 @@
<workflow-app name="Transformative Agreement Integration" xmlns="uri:oozie:workflow:0.5">

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapreduce.job.queuename</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.launcher.mapred.job.queue.name</name>
				<value>${oozieLauncherQueueName}</value>
			</property>
			<property>
				<name>oozie.action.sharelib.for.spark</name>
				<value>${oozieActionShareLibForSpark2}</value>
			</property>
		</configuration>
	</global>

	<start to="resume_from"/>

	<decision name="resume_from">
		<switch>
			<case to="download">${wf:conf('resumeFrom') eq 'DownloadDump'}</case>
			<default to="create_actionset"/> <!-- first action to be done when downloadDump is to be performed -->
		</switch>
	</decision>

	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>

	<action name="download">
		<shell xmlns="uri:oozie:shell-action:0.2">
			<job-tracker>${jobTracker}</job-tracker>
			<name-node>${nameNode}</name-node>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<exec>download.sh</exec>
			<argument>${inputFile}</argument>
			<argument>${workingDir}/transformativeagreement/transformativeAgreement.json</argument>
			<env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
			<file>download.sh</file>
			<capture-output/>
		</shell>
		<ok to="create_actionset"/>
		<error to="Kill"/>
	</action>

	<action name="create_actionset">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Produces the AS for the Transformative Agreement</name>
			<class>eu.dnetlib.dhp.actionmanager.transformativeagreement.CreateActionSetSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--inputPath</arg><arg>${workingDir}/transformativeagreement/</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>
	<end name="End"/>
</workflow-app>
@ -0,0 +1,26 @@
[
  {
    "paramName": "m",
    "paramLongName": "master",
    "paramDescription": "the master name",
    "paramRequired": true
  },
  {
    "paramName": "t",
    "paramLongName": "targetPath",
    "paramDescription": "the target PATH of the DF tables",
    "paramRequired": true
  },
  {
    "paramName": "g",
    "paramLongName": "graphPath",
    "paramDescription": "the PATH of the current graph",
    "paramRequired": true
  },
  {
    "paramName": "u",
    "paramLongName": "updatePath",
    "paramDescription": "the PATH of the graph update",
    "paramRequired": true
  }
]
@ -0,0 +1,37 @@
[
  {
    "paramName": "n",
    "paramLongName": "namenode",
    "paramDescription": "the Name Node URI",
    "paramRequired": true
  },
  {
    "paramName": "m",
    "paramLongName": "master",
    "paramDescription": "the master name",
    "paramRequired": true
  },
  {
    "paramName": "t",
    "paramLongName": "targetPath",
    "paramDescription": "the target PATH where the files are downloaded",
    "paramRequired": true
  },
  {
    "paramName": "a",
    "paramLongName": "apiURL",
    "paramDescription": "the URL to download the tar file",
    "paramRequired": true
  },
  {
    "paramName": "g",
    "paramLongName": "graphPath",
    "paramDescription": "the path of the input graph",
    "paramRequired": true
  },
  {
    "paramName": "at",
    "paramLongName": "accessToken",
    "paramDescription": "the accessToken used to contact the API",
    "paramRequired": true
  }
]
@ -16,6 +16,12 @@
    "paramLongName": "sourcePath",
    "paramDescription": "the PATH of the ORCID sequence file",
    "paramRequired": true
  },
  {
    "paramName": "fu",
    "paramLongName": "fromUpdate",
    "paramDescription": "whether to generate the tables from the full dump or from an update",
    "paramRequired": false
  }
]
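The new optional `fromUpdate` flag added here is the one passed as `--fromUpdate true` by the `generateTables` step of the ORCID update workflow below, switching the table generation from full-dump mode to update mode.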
@ -0,0 +1,23 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@ -0,0 +1,114 @@
<workflow-app name="download_Update_ORCID" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>graphPath</name>
            <description>the path of the current ORCID graph</description>
        </property>
        <property>
            <name>targetPath</name>
            <description>the path where the downloaded update files are stored</description>
        </property>
        <property>
            <name>apiURL</name>
            <value>http://74804fb637bd8e2fba5b-e0a029c2f87486cddec3b416996a6057.r3.cf1.rackcdn.com/last_modified.csv.tar</value>
            <description>the URL of the update CSV list</description>
        </property>
        <property>
            <name>accessToken</name>
            <description>the access token</description>
        </property>
    </parameters>

    <start to="startUpdate"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="startUpdate">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Check Latest ORCID and Download Updates</name>
            <class>eu.dnetlib.dhp.collection.orcid.OrcidGetUpdatesFile</class>
            <jar>dhp-aggregation-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.executor.memoryOverhead=2g
                --conf spark.sql.shuffle.partitions=3000
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--master</arg><arg>yarn</arg>
            <arg>--namenode</arg><arg>${nameNode}</arg>
            <arg>--graphPath</arg><arg>${graphPath}</arg>
            <arg>--targetPath</arg><arg>${targetPath}</arg>
            <arg>--apiURL</arg><arg>${apiURL}</arg>
            <arg>--accessToken</arg><arg>${accessToken}</arg>
        </spark>
        <ok to="generateTables"/>
        <error to="Kill"/>
    </action>

    <action name="generateTables">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Generate ORCID Tables</name>
            <class>eu.dnetlib.dhp.collection.orcid.SparkGenerateORCIDTable</class>
            <jar>dhp-aggregation-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.executor.memoryOverhead=2g
                --conf spark.sql.shuffle.partitions=3000
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${targetPath}</arg>
            <arg>--targetPath</arg><arg>${targetPath}/updateTable</arg>
            <arg>--fromUpdate</arg><arg>true</arg>
            <arg>--master</arg><arg>yarn</arg>
        </spark>
        <ok to="updateTable"/>
        <error to="Kill"/>
    </action>

    <action name="updateTable">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Update ORCID Tables</name>
            <class>eu.dnetlib.dhp.collection.orcid.SparkApplyUpdate</class>
            <jar>dhp-aggregation-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.executor.memoryOverhead=2g
                --conf spark.sql.shuffle.partitions=3000
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphPath}</arg>
            <arg>--updatePath</arg><arg>${targetPath}/updateTable</arg>
            <arg>--targetPath</arg><arg>${targetPath}/newTable</arg>
            <arg>--master</arg><arg>yarn</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>
    <end name="End"/>
</workflow-app>
@ -0,0 +1,114 @@
BEGIN;

INSERT INTO dsm_services(
    _dnet_resource_identifier_,
    id,
    officialname,
    englishname,
    namespaceprefix,
    websiteurl,
    logourl,
    platform,
    contactemail,
    collectedfrom,
    provenanceaction,
    _typology_to_remove_,
    eosc_type,
    eosc_datasource_type,
    research_entity_types,
    thematic
) VALUES (
    'openaire____::base_search',
    'openaire____::base_search',
    'Bielefeld Academic Search Engine (BASE)',
    'Bielefeld Academic Search Engine (BASE)',
    'base_search_',
    'https://www.base-search.net',
    'https://www.base-search.net/about/download/logo_224x57_white.gif',
    'BASE',
    'openaire-helpdesk@uni-bielefeld.de',
    'infrastruct_::openaire',
    'user:insert',
    'aggregator::pubsrepository::unknown',
    'Data Source',
    'Aggregator',
    ARRAY['Research Products'],
    false
);

INSERT INTO dsm_service_organization(
    _dnet_resource_identifier_,
    organization,
    service
) VALUES (
    'fairsharing_::org::214@@openaire____::base_search',
    'fairsharing_::org::214',
    'openaire____::base_search'
);

INSERT INTO dsm_api(
    _dnet_resource_identifier_,
    id,
    service,
    protocol,
    baseurl,
    metadata_identifier_path
) VALUES (
    'api_________::openaire____::base_search::dump',
    'api_________::openaire____::base_search::dump',
    'openaire____::base_search',
    'baseDump',
    '/user/michele.artini/base-import/base_oaipmh_dump-current.tar',
    '//*[local-name()=''header'']/*[local-name()=''identifier'']'
);

INSERT INTO dsm_apiparams(
    _dnet_resource_identifier_,
    api,
    param,
    value
) VALUES (
    'api_________::openaire____::base_search::dump@@dbUrl',
    'api_________::openaire____::base_search::dump',
    'dbUrl',
    'jdbc:postgresql://postgresql.services.openaire.eu:5432/dnet_openaireplus'
);

INSERT INTO dsm_apiparams(
    _dnet_resource_identifier_,
    api,
    param,
    value
) VALUES (
    'api_________::openaire____::base_search::dump@@dbUser',
    'api_________::openaire____::base_search::dump',
    'dbUser',
    'dnet'
);

INSERT INTO dsm_apiparams(
    _dnet_resource_identifier_,
    api,
    param,
    value
) VALUES (
    'api_________::openaire____::base_search::dump@@dbPassword',
    'api_________::openaire____::base_search::dump',
    'dbPassword',
    '***'
);

INSERT INTO dsm_apiparams(
    _dnet_resource_identifier_,
    api,
    param,
    value
) VALUES (
    'api_________::openaire____::base_search::dump@@acceptedNormTypes',
    'api_________::openaire____::base_search::dump',
    'acceptedNormTypes',
    '1,11,111,121,14,15,18,181,182,183,1A,6,7'
);

COMMIT;
@ -0,0 +1,9 @@
select s.id as id
from dsm_services s
where collectedfrom = 'openaire____::opendoar'
  and jurisdiction = 'Institutional'
  and s.id in (
    select service from dsm_api where coalesce(compatibility_override, compatibility) = 'driver' or coalesce(compatibility_override, compatibility) = 'UNKNOWN'
  ) and s.id not in (
    select service from dsm_api where coalesce(compatibility_override, compatibility) like '%openaire%'
  );
@ -0,0 +1,11 @@
select
    s.id as id,
    s.jurisdiction as jurisdiction,
    array_remove(array_agg(a.id || ' (compliance: ' || coalesce(a.compatibility_override, a.compatibility, 'UNKNOWN') || ')@@@' || coalesce(a.last_collection_total, 0)), NULL) as aggregations
from
    dsm_services s
    join dsm_api a on (s.id = a.service)
where
    collectedfrom = 'openaire____::opendoar'
group by
    s.id;
@ -0,0 +1,180 @@
<RESOURCE_PROFILE>
    <HEADER>
        <RESOURCE_IDENTIFIER value="c67911d6-9988-4a3b-b965-7d39bdd4a31d_Vm9jYWJ1bGFyeURTUmVzb3VyY2VzL1ZvY2FidWxhcnlEU1Jlc291cmNlVHlwZQ==" />
        <RESOURCE_TYPE value="VocabularyDSResourceType" />
        <RESOURCE_KIND value="VocabularyDSResources" />
        <RESOURCE_URI value="" />
        <DATE_OF_CREATION value="2024-02-13T11:15:48+00:00" />
    </HEADER>
    <BODY>
        <CONFIGURATION>
            <VOCABULARY_NAME code="base:normalized_types">base:normalized_types</VOCABULARY_NAME>
            <VOCABULARY_DESCRIPTION>base:normalized_types</VOCABULARY_DESCRIPTION>
            <TERMS>
                <TERM native_name="Text" code="Text" english_name="Text" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="1" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Book" code="Book" english_name="Book" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="11" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Book part" code="Book part" english_name="Book part" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="111" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Journal/Newspaper" code="Journal/Newspaper" english_name="Journal/Newspaper" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="12" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Article contribution" code="Article contribution" english_name="Article contribution" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="121" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Other non-article" code="Other non-article" english_name="Other non-article" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="122" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Conference object" code="Conference object" english_name="Conference object" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="13" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Report" code="Report" english_name="Report" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="14" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Review" code="Review" english_name="Review" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="15" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Course material" code="Course material" english_name="Course material" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="16" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Lecture" code="Lecture" english_name="Lecture" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="17" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Thesis" code="Thesis" english_name="Thesis" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="18" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Bachelor's thesis" code="Bachelor's thesis" english_name="Bachelor's thesis" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="181" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Master's thesis" code="Master's thesis" english_name="Master's thesis" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="182" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Doctoral and postdoctoral thesis" code="Doctoral and postdoctoral thesis" english_name="Doctoral and postdoctoral thesis" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="183" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Manuscript" code="Manuscript" english_name="Manuscript" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="19" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Patent" code="Patent" english_name="Patent" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="1A" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Musical notation" code="Musical notation" english_name="Musical notation" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="2" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Map" code="Map" english_name="Map" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="3" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Audio" code="Audio" english_name="Audio" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="4" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Image/Video" code="Image/Video" english_name="Image/Video" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="5" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Still image" code="Still image" english_name="Still image" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="51" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Moving image/Video" code="Moving image/Video" english_name="Moving image/Video" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="52" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Software" code="Software" english_name="Software" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="6" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Dataset" code="Dataset" english_name="Dataset" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="7" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
                <TERM native_name="Unknown" code="Unknown" english_name="Unknown" encoding="BASE">
                    <SYNONYMS>
                        <SYNONYM term="F" encoding="BASE" />
                    </SYNONYMS>
                    <RELATIONS />
                </TERM>
            </TERMS>
        </CONFIGURATION>
        <STATUS>
            <LAST_UPDATE value="2013-11-18T10:46:36Z" />
        </STATUS>
        <SECURITY_PARAMETERS>String</SECURITY_PARAMETERS>
    </BODY>
</RESOURCE_PROFILE>
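The `SYNONYM` codes above (1, 11, 111, ...) are the raw BASE `typenorm` values; the two transformation rules below normalize them through `vocabulary:clean(., 'base:normalized_types')` before mapping them onto `dr:CobjCategory` codes.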
@ -0,0 +1,432 @@
<RESOURCE_PROFILE>
    <HEADER>
        <RESOURCE_IDENTIFIER value="" />
        <RESOURCE_TYPE value="TransformationRuleDSResourceType" />
        <RESOURCE_KIND value="TransformationRuleDSResources" />
        <RESOURCE_URI value="" />
        <DATE_OF_CREATION value="2024-03-05T11:23:00+00:00" />
    </HEADER>
    <BODY>
        <CONFIGURATION>
            <SOURCE_METADATA_FORMAT interpretation="cleaned" layout="store" name="dc" />
            <SINK_METADATA_FORMAT name="oaf_hbase" />
            <IMPORTED />
            <SCRIPT>
                <TITLE>xslt_base2oaf_hadoop</TITLE>
                <CODE>
<xsl:stylesheet xmlns:oaire="http://namespace.openaire.eu/schema/oaire/" xmlns:dateCleaner="http://eu/dnetlib/transform/dateISO"
    xmlns:base_dc="http://oai.base-search.net/base_dc/"
    xmlns:datacite="http://datacite.org/schema/kernel-4" xmlns:dr="http://www.driver-repository.eu/namespace/dr" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:vocabulary="http://eu/dnetlib/transform/clean" xmlns:oaf="http://namespace.openaire.eu/oaf"
    xmlns:oai="http://www.openarchives.org/OAI/2.0/" xmlns:dri="http://www.driver-repository.eu/namespace/dri" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:dc="http://purl.org/dc/elements/1.1/"
    exclude-result-prefixes="xsl vocabulary dateCleaner base_dc" version="2.0">
    <xsl:param name="varOfficialName" />
    <xsl:param name="varDataSourceId" />
    <xsl:param name="varFP7" select="'corda_______::'" />
    <xsl:param name="varH2020" select="'corda__h2020::'" />
    <xsl:param name="repoCode" select="substring-before(//*[local-name() = 'header']/*[local-name()='recordIdentifier'], ':')" />
    <xsl:param name="index" select="0" />
    <xsl:param name="transDate" select="current-dateTime()" />

    <xsl:template name="terminate">
        <xsl:message terminate="yes">
            record is not compliant, transformation is interrupted.
        </xsl:message>
    </xsl:template>

    <xsl:template match="/">
        <record>
            <xsl:apply-templates select="//*[local-name() = 'header']" />

            <!-- TO EVALUATE
                base_dc:authod_id
                base_dc:authod_id/base_dc:creator_id
                base_dc:authod_id/base_dc:creator_name

                example:

                <dc:creator>ALBU, Svetlana</dc:creator>

                <base_dc:authod_id>
                    <base_dc:creator_name>ALBU, Svetlana</base_dc:creator_name>
                    <base_dc:creator_id>https://orcid.org/0000-0002-8648-950X</base_dc:creator_id>
                </base_dc:authod_id>
            -->

            <!-- NOT USED
                base_dc:global_id (I used oai:identifier)
                base_dc:collection/text()
                base_dc:continent
                base_dc:country
                base_dc:year (I used dc:date)
                dc:coverage
                dc:language (I used base_dc:lang)
                base_dc:link (I used dc:identifier)
            -->

            <metadata>
                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:title" />
                    <xsl:with-param name="targetElement" select="'dc:title'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:creator/replace(., '^(.*)\|.*$', '$1')" />
                    <xsl:with-param name="targetElement" select="'dc:creator'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:contributor" />
                    <xsl:with-param name="targetElement" select="'dc:contributor'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:description" />
                    <xsl:with-param name="targetElement" select="'dc:description'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:subject" />
                    <xsl:with-param name="targetElement" select="'dc:subject'" />
                </xsl:call-template>

                <!-- TODO: I'm not sure if this is the correct encoding -->
                <xsl:for-each select="//base_dc:classcode|//base_dc:autoclasscode">
                    <dc:subject><xsl:value-of select="concat(@type, ':', .)" /></dc:subject>
                </xsl:for-each>
                <!-- END TODO -->

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:publisher" />
                    <xsl:with-param name="targetElement" select="'dc:publisher'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:format" />
                    <xsl:with-param name="targetElement" select="'dc:format'" />
                </xsl:call-template>

                <xsl:for-each select="//base_dc:typenorm">
                    <dc:type>
                        <xsl:value-of select="vocabulary:clean(., 'base:normalized_types')" />
                    </dc:type>
                </xsl:for-each>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:type" />
                    <xsl:with-param name="targetElement" select="'dc:type'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:source" />
                    <xsl:with-param name="targetElement" select="'dc:source'" />
                </xsl:call-template>

                <dc:language>
                    <xsl:value-of select="vocabulary:clean( //base_dc:lang, 'dnet:languages')" />
                </dc:language>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:rights" />
                    <xsl:with-param name="targetElement" select="'dc:rights'" />
                </xsl:call-template>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:relation" />
                    <xsl:with-param name="targetElement" select="'dc:relation'" />
                </xsl:call-template>

                <xsl:if test="not(//dc:identifier[starts-with(., 'http')])">
                    <xsl:call-template name="terminate" />
                </xsl:if>

                <xsl:call-template name="allElements">
                    <xsl:with-param name="sourceElement" select="//dc:identifier[starts-with(., 'http')]" />
                    <xsl:with-param name="targetElement" select="'dc:identifier'" />
                </xsl:call-template>

                <xsl:for-each select="//dc:relation">
                    <xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', 'i')">
                        <oaf:projectid>
                            <xsl:value-of select="concat($varFP7, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
                        </oaf:projectid>
                    </xsl:if>
                    <xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', 'i')">
                        <oaf:projectid>
                            <xsl:value-of select="concat($varH2020, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
                        </oaf:projectid>
                    </xsl:if>
                </xsl:for-each>

                <xsl:choose>
                    <!-- An inline mapping is used because the typenorm field can be repeated, so a priority order must be applied -->

                    <!-- Book part -->
                    <xsl:when test="//base_dc:typenorm = '111'">
                        <dr:CobjCategory type="publication">0013</dr:CobjCategory>
                    </xsl:when>

                    <!-- Book -->
                    <xsl:when test="//base_dc:typenorm = '11'">
                        <dr:CobjCategory type="publication">0002</dr:CobjCategory>
                    </xsl:when>

                    <!-- Article contribution -->
                    <xsl:when test="//base_dc:typenorm = '121'">
                        <dr:CobjCategory type="publication">0001</dr:CobjCategory>
                    </xsl:when>

                    <!-- Journal/Newspaper -->
                    <xsl:when test="//base_dc:typenorm = '12'">
                        <dr:CobjCategory type="publication">0043</dr:CobjCategory>
                    </xsl:when>

                    <!-- Report -->
                    <xsl:when test="//base_dc:typenorm = '14'">
                        <dr:CobjCategory type="publication">0017</dr:CobjCategory>
                    </xsl:when>

                    <!-- Review -->
                    <xsl:when test="//base_dc:typenorm = '15'">
                        <dr:CobjCategory type="publication">0015</dr:CobjCategory>
                    </xsl:when>

                    <!-- Lecture -->
                    <xsl:when test="//base_dc:typenorm = '17'">
                        <dr:CobjCategory type="publication">0010</dr:CobjCategory>
                    </xsl:when>

                    <!-- Bachelor's thesis -->
                    <xsl:when test="//base_dc:typenorm = '181'">
                        <dr:CobjCategory type="publication">0008</dr:CobjCategory>
                    </xsl:when>

                    <!-- Master's thesis -->
                    <xsl:when test="//base_dc:typenorm = '182'">
                        <dr:CobjCategory type="publication">0007</dr:CobjCategory>
                    </xsl:when>

                    <!-- Doctoral and postdoctoral thesis -->
                    <xsl:when test="//base_dc:typenorm = '183'">
                        <dr:CobjCategory type="publication">0006</dr:CobjCategory>
                    </xsl:when>

                    <!-- Thesis -->
                    <xsl:when test="//base_dc:typenorm = '18'">
                        <dr:CobjCategory type="publication">0044</dr:CobjCategory>
                    </xsl:when>

                    <!-- Patent -->
                    <xsl:when test="//base_dc:typenorm = '1A'">
                        <dr:CobjCategory type="publication">0019</dr:CobjCategory>
                    </xsl:when>

                    <!-- Text -->
                    <xsl:when test="//base_dc:typenorm = '1'">
                        <dr:CobjCategory type="publication">0001</dr:CobjCategory>
                    </xsl:when>

                    <!-- Software -->
                    <xsl:when test="//base_dc:typenorm = '6'">
                        <dr:CobjCategory type="software">0029</dr:CobjCategory>
                    </xsl:when>

                    <!-- Dataset -->
                    <xsl:when test="//base_dc:typenorm = '7'">
                        <dr:CobjCategory type="dataset">0021</dr:CobjCategory>
                    </xsl:when>

                    <!-- Still image -->
                    <xsl:when test="//base_dc:typenorm = '51'">
                        <dr:CobjCategory type="other">0025</dr:CobjCategory>
                    </xsl:when>

                    <!-- Moving image/Video -->
                    <xsl:when test="//base_dc:typenorm = '52'">
                        <dr:CobjCategory type="other">0024</dr:CobjCategory>
                    </xsl:when>

                    <!-- Image/Video -->
                    <xsl:when test="//base_dc:typenorm = '5'">
                        <dr:CobjCategory type="other">0033</dr:CobjCategory>
                    </xsl:when>

                    <!-- Audio -->
                    <xsl:when test="//base_dc:typenorm = '4'">
                        <dr:CobjCategory type="other">0030</dr:CobjCategory>
                    </xsl:when>

                    <!-- Musical notation -->
                    <xsl:when test="//base_dc:typenorm = '2'">
                        <dr:CobjCategory type="other">0020</dr:CobjCategory>
                    </xsl:when>

                    <!-- Map -->
                    <xsl:when test="//base_dc:typenorm = '3'">
                        <dr:CobjCategory type="other">0020</dr:CobjCategory>
                    </xsl:when>

                    <!-- Other non-article -->
                    <xsl:when test="//base_dc:typenorm = '122'">
                        <dr:CobjCategory type="publication">0038</dr:CobjCategory>
                    </xsl:when>

                    <!-- Course material -->
                    <xsl:when test="//base_dc:typenorm = '16'">
                        <dr:CobjCategory type="publication">0038</dr:CobjCategory>
                    </xsl:when>

                    <!-- Manuscript -->
                    <xsl:when test="//base_dc:typenorm = '19'">
                        <dr:CobjCategory type="publication">0038</dr:CobjCategory>
                    </xsl:when>

                    <!-- Conference object -->
                    <xsl:when test="//base_dc:typenorm = '13'">
                        <dr:CobjCategory type="publication">0004</dr:CobjCategory>
                    </xsl:when>

                    <!-- Unknown -->
                    <xsl:when test="//base_dc:typenorm = 'F'">
                        <dr:CobjCategory type="other">0000</dr:CobjCategory>
                    </xsl:when>
                    <xsl:otherwise>
                        <dr:CobjCategory type="other">0000</dr:CobjCategory>
                    </xsl:otherwise>
                </xsl:choose>

                <oaf:accessrights>
                    <xsl:choose>
                        <xsl:when test="//base_dc:oa[.='0']">CLOSED</xsl:when>
                        <xsl:when test="//base_dc:oa[.='1']">OPEN</xsl:when>
                        <xsl:when test="//base_dc:oa[.='2']">UNKNOWN</xsl:when>
                        <xsl:when test="//base_dc:rightsnorm">
                            <xsl:value-of select="vocabulary:clean(//base_dc:rightsnorm, 'dnet:access_modes')" />
                        </xsl:when>
                        <xsl:when test="//dc:rights">
                            <xsl:value-of select="vocabulary:clean( //dc:rights, 'dnet:access_modes')" />
                        </xsl:when>
                        <xsl:otherwise>UNKNOWN</xsl:otherwise>
                    </xsl:choose>
                </oaf:accessrights>

                <xsl:for-each select="//base_dc:doi">
                    <oaf:identifier identifierType="doi">
                        <xsl:value-of select="." />
                    </oaf:identifier>
                </xsl:for-each>

                <xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'http') and (not(contains(., '://dx.doi.org/') or contains(., '://doi.org/') or contains(., '://hdl.handle.net/')))])">
                    <oaf:identifier identifierType="url">
                        <xsl:value-of select="." />
                    </oaf:identifier>
                </xsl:for-each>

                <xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'http') and contains(., '://hdl.handle.net/')]/substring-after(., 'hdl.handle.net/'))">
                    <oaf:identifier identifierType="handle">
                        <xsl:value-of select="." />
                    </oaf:identifier>
                </xsl:for-each>

                <xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'urn:nbn:nl:') or starts-with(., 'URN:NBN:NL:')])">
                    <oaf:identifier identifierType='urn'>
                        <xsl:value-of select="." />
                    </oaf:identifier>
                </xsl:for-each>

                <oaf:identifier identifierType="oai-original">
                    <xsl:value-of select="//oai:header/oai:identifier" />
                </oaf:identifier>

                <oaf:hostedBy>
                    <xsl:attribute name="name">
                        <xsl:value-of select="//base_dc:collname" />
                    </xsl:attribute>
                    <xsl:attribute name="id">
                        <xsl:value-of select="concat('opendoar____::', //base_dc:collection/@opendoar_id)" />
                    </xsl:attribute>
                </oaf:hostedBy>

                <oaf:collectedFrom>
                    <xsl:attribute name="name">
                        <xsl:value-of select="$varOfficialName" />
                    </xsl:attribute>
                    <xsl:attribute name="id">
                        <xsl:value-of select="$varDataSourceId" />
                    </xsl:attribute>
                </oaf:collectedFrom>

                <oaf:dateAccepted>
                    <xsl:value-of select="dateCleaner:dateISO( //dc:date[1] )" />
                </oaf:dateAccepted>

                <xsl:if test="//base_dc:oa[.='1']">
                    <xsl:for-each select="//dc:relation[starts-with(., 'http')]">
                        <oaf:fulltext>
                            <xsl:value-of select="normalize-space(.)" />
                        </oaf:fulltext>
                    </xsl:for-each>
                </xsl:if>

                <xsl:for-each select="//base_dc:collection/@ror_id">
                    <oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution" targetType="organization">
                        <xsl:choose>
                            <xsl:when test="contains(.,'https://ror.org/')">
                                <xsl:value-of select="concat('ror_________::', normalize-space(.))" />
                            </xsl:when>
                            <xsl:otherwise>
                                <xsl:value-of select="concat('ror_________::https://ror.org/', normalize-space(.))" />
                            </xsl:otherwise>
                        </xsl:choose>
                    </oaf:relation>
                </xsl:for-each>
            </metadata>
            <xsl:copy-of select="//*[local-name() = 'about']" />
        </record>
    </xsl:template>

    <xsl:template name="allElements">
        <xsl:param name="sourceElement" />
        <xsl:param name="targetElement" />
        <xsl:for-each select="$sourceElement">
            <xsl:element name="{$targetElement}">
                <xsl:value-of select="normalize-space(.)" />
            </xsl:element>
        </xsl:for-each>
    </xsl:template>

    <xsl:template match="//*[local-name() = 'header']">
        <xsl:if test="//oai:header/@status='deleted'">
            <xsl:call-template name="terminate" />
        </xsl:if>
        <xsl:copy>
            <xsl:apply-templates select="node()|@*" />
            <xsl:element name="dr:dateOfTransformation">
                <xsl:value-of select="$transDate" />
            </xsl:element>
        </xsl:copy>
    </xsl:template>

    <xsl:template match="node()|@*">
        <xsl:copy>
            <xsl:apply-templates select="node()|@*" />
        </xsl:copy>
    </xsl:template>
</xsl:stylesheet>
                </CODE>
            </SCRIPT>
        </CONFIGURATION>
        <STATUS />
        <SECURITY_PARAMETERS />
    </BODY>
</RESOURCE_PROFILE>
@ -0,0 +1,461 @@
|
||||||
|
<RESOURCE_PROFILE>
|
||||||
|
<HEADER>
|
||||||
|
<RESOURCE_IDENTIFIER value="2ad0cdd9-c96c-484c-8b0e-ed56d86891fe_VHJhbnNmb3JtYXRpb25SdWxlRFNSZXNvdXJjZXMvVHJhbnNmb3JtYXRpb25SdWxlRFNSZXNvdXJjZVR5cGU=" />
|
||||||
|
<RESOURCE_TYPE value="TransformationRuleDSResourceType" />
|
||||||
|
<RESOURCE_KIND value="TransformationRuleDSResources" />
|
||||||
|
<RESOURCE_URI value="" />
|
||||||
|
<DATE_OF_CREATION value="2024-03-05T11:23:00+00:00" />
|
||||||
|
</HEADER>
|
||||||
|
<BODY>
|
||||||
|
<CONFIGURATION>
|
||||||
|
<SOURCE_METADATA_FORMAT interpretation="cleaned" layout="store" name="dc" />
|
||||||
|
<SINK_METADATA_FORMAT name="odf_hbase" />
|
||||||
|
<IMPORTED />
|
||||||
|
<SCRIPT>
|
||||||
|
<TITLE>xslt_base2odf_hadoop</TITLE>
|
||||||
|
<CODE>
|
||||||
|
<xsl:stylesheet xmlns:oaire="http://namespace.openaire.eu/schema/oaire/" xmlns:dateCleaner="http://eu/dnetlib/transform/dateISO" xmlns:base_dc="http://oai.base-search.net/base_dc/"
|
||||||
|
xmlns:datacite="http://datacite.org/schema/kernel-4" xmlns:dr="http://www.driver-repository.eu/namespace/dr" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:vocabulary="http://eu/dnetlib/transform/clean" xmlns:oaf="http://namespace.openaire.eu/oaf"
|
||||||
|
xmlns:oai="http://www.openarchives.org/OAI/2.0/" xmlns:dri="http://www.driver-repository.eu/namespace/dri" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||||
|
exclude-result-prefixes="xsl vocabulary dateCleaner base_dc" version="2.0">
|
||||||
|
<xsl:param name="varOfficialName" />
|
||||||
|
<xsl:param name="varDataSourceId" />
|
||||||
|
<xsl:param name="varFP7" select="'corda_______::'" />
|
||||||
|
<xsl:param name="varH2020" select="'corda__h2020::'" />
|
||||||
|
<xsl:param name="repoCode" select="substring-before(//*[local-name() = 'header']/*[local-name()='recordIdentifier'], ':')" />
|
||||||
|
<xsl:param name="index" select="0" />
|
||||||
|
<xsl:param name="transDate" select="current-dateTime()" />
|
||||||
|
|
||||||
|
<xsl:template name="terminate">
|
||||||
|
<xsl:message terminate="yes">
|
||||||
|
record is not compliant, transformation is interrupted.
|
||||||
|
</xsl:message>
|
||||||
|
</xsl:template>
|
||||||
|
|
||||||
|
<xsl:template match="/">
|
||||||
|
<record>
|
||||||
|
<xsl:apply-templates select="//*[local-name() = 'header']" />
|
||||||
|
|
||||||
|
|
||||||
|
<!-- NOT USED
|
||||||
|
base_dc:global_id (I used oai:identifier)
|
||||||
|
base_dc:collection/text()
|
||||||
|
base_dc:continent
|
||||||
|
base_dc:country
|
||||||
|
dc:coverage
|
||||||
|
dc:source
|
||||||
|
dc:relation
|
||||||
|
dc:type (I used //base_dc:typenorm)
|
||||||
|
dc:language (I used base_dc:lang)
|
||||||
|
base_dc:link (I used dc:identifier)
|
||||||
|
-->
|
||||||
|
|
||||||
|
<metadata>
|
||||||
|
<datacite:resource>
|
||||||
|
|
||||||
|
<xsl:for-each select="//base_dc:doi">
|
||||||
|
<datacite:identifier identifierType="DOI">
|
||||||
|
<xsl:value-of select="." />
|
||||||
|
</datacite:identifier>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<datacite:alternateIdentifiers>
|
||||||
|
<xsl:for-each
|
||||||
|
select="distinct-values(//dc:identifier[starts-with(., 'http') and (not(contains(., '://dx.doi.org/') or contains(., '://doi.org/') or contains(., '://hdl.handle.net/')))])">
|
||||||
|
<datacite:identifier alternateIdentifierType="url">
|
||||||
|
<xsl:value-of select="." />
|
||||||
|
</datacite:identifier>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'http') and contains(., '://hdl.handle.net/')]/substring-after(., 'hdl.handle.net/'))">
|
||||||
|
<datacite:identifier alternateIdentifierType="handle">
|
||||||
|
<xsl:value-of select="." />
|
||||||
|
</datacite:identifier>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'urn:nbn:nl:') or starts-with(., 'URN:NBN:NL:')])">
|
||||||
|
<datacite:identifier alternateIdentifierType='urn'>
|
||||||
|
<xsl:value-of select="." />
|
||||||
|
</datacite:identifier>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<datacite:identifier alternateIdentifierType="oai-original">
|
||||||
|
<xsl:value-of
|
||||||
|
select="//oai:header/oai:identifier" />
|
||||||
|
</datacite:identifier>
|
||||||
|
|
||||||
|
</datacite:alternateIdentifiers>
|
||||||
|
|
||||||
|
<datacite:relatedIdentifiers />
|
||||||
|
|
||||||
|
|
||||||
|
<xsl:for-each select="//base_dc:typenorm">
|
||||||
|
<datacite:resourceType><xsl:value-of select="vocabulary:clean(., 'base:normalized_types')" /></datacite:resourceType>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<datacite:titles>
|
||||||
|
<xsl:for-each select="//dc:title">
|
||||||
|
<datacite:title>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:title>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:titles>
|
||||||
|
|
||||||
|
<datacite:creators>
|
||||||
|
<xsl:for-each select="//dc:creator">
|
||||||
|
<xsl:variable name="author" select="normalize-space(.)" />
|
||||||
|
<datacite:creator>
|
||||||
|
<datacite:creatorName>
|
||||||
|
<xsl:value-of select="$author" />
|
||||||
|
</datacite:creatorName>
|
||||||
|
<xsl:for-each select="//base_dc:authod_id[normalize-space(./base_dc:creator_name) = $author]/base_dc:creator_id ">
|
||||||
|
<xsl:if test="contains(.,'https://orcid.org/')">
|
||||||
|
<nameIdentifier schemeURI="https://orcid.org/" nameIdentifierScheme="ORCID">
|
||||||
|
<xsl:value-of select="substring-after(., 'https://orcid.org/')" />
|
||||||
|
</nameIdentifier>
|
||||||
|
</xsl:if>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:creator>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:creators>
|
||||||
|
|
||||||
|
<datacite:contributors>
|
||||||
|
<xsl:for-each select="//dc:contributor">
|
||||||
|
<datacite:contributor>
|
||||||
|
<datacite:contributorName>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:contributorName>
|
||||||
|
</datacite:contributor>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:contributors>
|
||||||
|
|
||||||
|
<datacite:descriptions>
|
||||||
|
<xsl:for-each select="//dc:description">
|
||||||
|
<datacite:description descriptionType="Abstract">
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:description>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:descriptions>
|
||||||
|
|
||||||
|
<datacite:subjects>
|
||||||
|
<xsl:for-each select="//dc:subject">
|
||||||
|
<datacite:subject>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:subject>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<xsl:for-each select="//base_dc:classcode|//base_dc:autoclasscode">
|
||||||
|
<datacite:subject subjectScheme="{@type}" classificationCode="{normalize-space(.)}">
|
||||||
|
<!-- TODO the value should be obtained by the Code -->
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:subject>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:subjects>
|
||||||
|
|
||||||
|
<xsl:for-each select="//dc:publisher">
|
||||||
|
<datacite:publisher>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:publisher>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<xsl:for-each select="//base_dc:year">
|
||||||
|
<datacite:publicationYear>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:publicationYear>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<datacite:formats>
|
||||||
|
<xsl:for-each select="//dc:format">
|
||||||
|
<datacite:format>
|
||||||
|
<xsl:value-of select="normalize-space(.)" />
|
||||||
|
</datacite:format>
|
||||||
|
</xsl:for-each>
|
||||||
|
</datacite:formats>
|
||||||
|
|
||||||
|
<datacite:language>
|
||||||
|
<xsl:value-of select="vocabulary:clean( //base_dc:lang, 'dnet:languages')" />
|
||||||
|
</datacite:language>
|
||||||
|
|
||||||
|
<oaf:accessrights>
|
||||||
|
<xsl:if test="//base_dc:oa[.='0']">
|
||||||
|
<datacite:rights rightsURI="http://purl.org/coar/access_right/c_16ec">restricted access</datacite:rights>
|
||||||
|
</xsl:if>
|
||||||
|
<xsl:if test="//base_dc:oa[.='1']">
|
||||||
|
<datacite:rights rightsURI="http://purl.org/coar/access_right/c_abf2">open access</datacite:rights>
|
||||||
|
</xsl:if>
|
||||||
|
<xsl:for-each select="//dc:rights|//base_dc:rightsnorm">
|
||||||
|
<datacite:rights><xsl:value-of select="vocabulary:clean(., 'dnet:access_modes')" /></datacite:rights>
|
||||||
|
</xsl:for-each>
|
||||||
|
</oaf:accessrights>
|
||||||
|
|
||||||
|
</datacite:resource>
|
||||||
|
|
||||||
|
<xsl:for-each select="//dc:relation">
|
||||||
|
<xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', 'i')">
|
||||||
|
<oaf:projectid>
|
||||||
|
<xsl:value-of select="concat($varFP7, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
|
||||||
|
</oaf:projectid>
|
||||||
|
</xsl:if>
|
||||||
|
<xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', 'i')">
|
||||||
|
<oaf:projectid>
|
||||||
|
<xsl:value-of select="concat($varH2020, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
|
||||||
|
</oaf:projectid>
|
||||||
|
</xsl:if>
|
||||||
|
</xsl:for-each>
|
||||||
|
|
||||||
|
<xsl:choose>
|
||||||
|
<!-- I used an inline mapping because the field typenorm could be repeated and I have to specify a list of priority -->
|
||||||
|
|
||||||
|
<!-- Book part -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '111'">
|
||||||
|
<dr:CobjCategory type="publication">0013</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Book -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '11'">
|
||||||
|
<dr:CobjCategory type="publication">0002</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Article contribution -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '121'">
|
||||||
|
<dr:CobjCategory type="publication">0001</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Journal/Newspaper -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '12'">
|
||||||
|
<dr:CobjCategory type="publication">0043</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Report -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '14'">
|
||||||
|
<dr:CobjCategory type="publication">0017</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Review -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '15'">
|
||||||
|
<dr:CobjCategory type="publication">0015</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Lecture -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '17'">
|
||||||
|
<dr:CobjCategory type="publication">0010</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Bachelor's thesis -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '181'">
|
||||||
|
<dr:CobjCategory type="publication">0008</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Master's thesis -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '182'">
|
||||||
|
<dr:CobjCategory type="publication">0007</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Doctoral and postdoctoral thesis -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '183'">
|
||||||
|
<dr:CobjCategory type="publication">0006</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Thesis -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '18'">
|
||||||
|
<dr:CobjCategory type="publication">0044</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Patent -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '1A'">
|
||||||
|
<dr:CobjCategory type="publication">0019</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Text -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '1'">
|
||||||
|
<dr:CobjCategory type="publication">0001</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Software -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '6'">
|
||||||
|
<dr:CobjCategory type="software">0029</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Dataset -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '7'">
|
||||||
|
<dr:CobjCategory type="dataset">0021</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Still image -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '51'">
|
||||||
|
<dr:CobjCategory type="other">0025</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Moving image/Video -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '52'">
|
||||||
|
<dr:CobjCategory type="other">0024</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Image/Video -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '5'">
|
||||||
|
<dr:CobjCategory type="other">0033</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Audio -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '4'">
|
||||||
|
<dr:CobjCategory type="other">0030</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Musical notation -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '2'">
|
||||||
|
<dr:CobjCategory type="other">0020</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Map -->
|
||||||
|
<xsl:when test="//base_dc:typenorm = '3'">
|
||||||
|
<dr:CobjCategory type="other">0020</dr:CobjCategory>
|
||||||
|
</xsl:when>
|
||||||
|
|
||||||
|
<!-- Other non-article -->
<xsl:when test="//base_dc:typenorm = '122'">
	<dr:CobjCategory type="publication">0038</dr:CobjCategory>
</xsl:when>

<!-- Course material -->
<xsl:when test="//base_dc:typenorm = '16'">
	<dr:CobjCategory type="publication">0038</dr:CobjCategory>
</xsl:when>

<!-- Manuscript -->
<xsl:when test="//base_dc:typenorm = '19'">
	<dr:CobjCategory type="publication">0038</dr:CobjCategory>
</xsl:when>

<!-- Conference object -->
<xsl:when test="//base_dc:typenorm = '13'">
	<dr:CobjCategory type="publication">0004</dr:CobjCategory>
</xsl:when>

<!-- Unknown -->
<xsl:when test="//base_dc:typenorm = 'F'">
	<dr:CobjCategory type="other">0000</dr:CobjCategory>
</xsl:when>

<xsl:otherwise>
	<dr:CobjCategory type="other">0000</dr:CobjCategory>
</xsl:otherwise>
</xsl:choose>

<oaf:accessrights>
	<xsl:choose>
		<xsl:when test="//base_dc:oa[.='0']">CLOSED</xsl:when>
		<xsl:when test="//base_dc:oa[.='1']">OPEN</xsl:when>
		<xsl:when test="//base_dc:oa[.='2']">UNKNOWN</xsl:when>
		<xsl:when test="//base_dc:rightsnorm">
			<xsl:value-of select="vocabulary:clean(//base_dc:rightsnorm, 'dnet:access_modes')" />
		</xsl:when>
		<xsl:when test="//dc:rights">
			<xsl:value-of select="vocabulary:clean(//dc:rights, 'dnet:access_modes')" />
		</xsl:when>
		<xsl:otherwise>UNKNOWN</xsl:otherwise>
	</xsl:choose>
</oaf:accessrights>

<xsl:for-each select="//base_dc:doi">
	<oaf:identifier identifierType="doi">
		<xsl:value-of select="." />
	</oaf:identifier>
</xsl:for-each>

<xsl:for-each
	select="distinct-values(//dc:identifier[starts-with(., 'http') and (not(contains(., '://dx.doi.org/') or contains(., '://doi.org/') or contains(., '://hdl.handle.net/')))])">
	<oaf:identifier identifierType="url">
		<xsl:value-of select="." />
	</oaf:identifier>
</xsl:for-each>

<xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'http') and contains(., '://hdl.handle.net/')]/substring-after(., 'hdl.handle.net/'))">
	<oaf:identifier identifierType="handle">
		<xsl:value-of select="." />
	</oaf:identifier>
</xsl:for-each>

<xsl:for-each select="distinct-values(//dc:identifier[starts-with(., 'urn:nbn:nl:') or starts-with(., 'URN:NBN:NL:')])">
	<oaf:identifier identifierType="urn">
		<xsl:value-of select="." />
	</oaf:identifier>
</xsl:for-each>

<oaf:identifier identifierType="oai-original">
	<xsl:value-of
		select="//oai:header/oai:identifier" />
</oaf:identifier>

<oaf:hostedBy>
	<xsl:attribute name="name">
		<xsl:value-of select="//base_dc:collname" />
	</xsl:attribute>
	<xsl:attribute name="id">
		<xsl:value-of select="concat('opendoar____::', //base_dc:collection/@opendoar_id)" />
	</xsl:attribute>
</oaf:hostedBy>

<oaf:collectedFrom>
	<xsl:attribute name="name">
		<xsl:value-of select="$varOfficialName" />
	</xsl:attribute>
	<xsl:attribute name="id">
		<xsl:value-of select="$varDataSourceId" />
	</xsl:attribute>
</oaf:collectedFrom>

<oaf:dateAccepted>
	<xsl:value-of select="dateCleaner:dateISO(//dc:date[1])" />
</oaf:dateAccepted>

<xsl:if test="//base_dc:oa[.='1']">
	<xsl:for-each select="//dc:relation[starts-with(., 'http')]">
		<oaf:fulltext>
			<xsl:value-of select="normalize-space(.)" />
		</oaf:fulltext>
	</xsl:for-each>
</xsl:if>

<xsl:for-each select="//base_dc:collection/@ror_id">
	<oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution" targetType="organization">
		<xsl:choose>
			<xsl:when test="contains(., 'https://ror.org/')">
				<xsl:value-of select="concat('ror_________::', normalize-space(.))" />
			</xsl:when>
			<xsl:otherwise>
				<xsl:value-of select="concat('ror_________::https://ror.org/', normalize-space(.))" />
			</xsl:otherwise>
		</xsl:choose>
	</oaf:relation>
</xsl:for-each>

</metadata>
<xsl:copy-of select="//*[local-name() = 'about']" />
</record>
</xsl:template>

<xsl:template match="//*[local-name() = 'header']">
	<xsl:if test="//oai:header/@status='deleted'">
		<xsl:call-template name="terminate" />
	</xsl:if>
	<xsl:copy>
		<xsl:apply-templates select="node()|@*" />
		<xsl:element name="dr:dateOfTransformation">
			<xsl:value-of select="$transDate" />
		</xsl:element>
	</xsl:copy>
</xsl:template>

<xsl:template match="node()|@*">
	<xsl:copy>
		<xsl:apply-templates select="node()|@*" />
	</xsl:copy>
</xsl:template>

</xsl:stylesheet>
</CODE>
</SCRIPT>
</CONFIGURATION>
<STATUS />
<SECURITY_PARAMETERS />
</BODY>
</RESOURCE_PROFILE>
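Note: the final part of the <xsl:choose> above is essentially a lookup table from BASE typenorm codes to OpenAIRE CobjCategory codes, with a fixed fallback. A minimal Scala sketch of the same table, useful for eyeballing the mapping; the object and method names are hypothetical and not part of the codebase:

// Hypothetical helper mirroring the <xsl:choose> above.
object BaseTypeMapping {

  // (CobjCategory code, result type) per BASE typenorm code, copied from the template;
  // anything unmapped falls through to the <xsl:otherwise> branch.
  private val byTypenorm: Map[String, (String, String)] = Map(
    "122" -> ("0038", "publication"), // Other non-article
    "16"  -> ("0038", "publication"), // Course material
    "19"  -> ("0038", "publication"), // Manuscript
    "13"  -> ("0004", "publication"), // Conference object
    "F"   -> ("0000", "other")        // Unknown
  )

  def cobjCategory(typenorm: String): (String, String) =
    byTypenorm.getOrElse(typenorm, ("0000", "other"))
}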
@@ -0,0 +1,120 @@
package eu.dnetlib.dhp.collection.orcid

import eu.dnetlib.dhp.application.AbstractScalaApplication
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

class SparkApplyUpdate(propertyPath: String, args: Array[String], log: Logger)
  extends AbstractScalaApplication(propertyPath, args, log: Logger) {

  /** Entry point of every Spark application: the whole logic of the Spark node is defined here. */
  override def run(): Unit = {

    val graphPath: String = parser.get("graphPath")
    log.info("found parameters graphPath: {}", graphPath)
    val updatePath: String = parser.get("updatePath")
    log.info("found parameters updatePath: {}", updatePath)
    val targetPath: String = parser.get("targetPath")
    log.info("found parameters targetPath: {}", targetPath)
    applyTableUpdate(spark, graphPath, updatePath, targetPath)
    checkUpdate(spark, graphPath, targetPath)
    moveTable(spark, graphPath, targetPath)

  }

  private def moveTable(spark: SparkSession, graphPath: String, updatePath: String): Unit = {
    spark.read
      .load(s"$updatePath/Authors")
      .repartition(1000)
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$graphPath/Authors")
    spark.read
      .load(s"$updatePath/Works")
      .repartition(1000)
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$graphPath/Works")
    spark.read
      .load(s"$updatePath/Employments")
      .repartition(1000)
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$graphPath/Employments")
  }

  private def updateDataset(
    inputDataset: DataFrame,
    idUpdate: DataFrame,
    updateDataframe: DataFrame,
    targetPath: String
  ): Unit = {
    inputDataset
      .join(idUpdate, inputDataset("orcid").equalTo(idUpdate("orcid")), "leftanti")
      .select(inputDataset("*"))
      .unionByName(updateDataframe)
      .write
      .mode(SaveMode.Overwrite)
      .save(targetPath)
  }

  private def checkUpdate(spark: SparkSession, graphPath: String, updatePath: String): Unit = {
    val totalOriginalAuthors = spark.read.load(s"$graphPath/Authors").count
    val totalOriginalWorks = spark.read.load(s"$graphPath/Works").count
    val totalOriginalEmployments = spark.read.load(s"$graphPath/Employments").count
    val totalUpdateAuthors = spark.read.load(s"$updatePath/Authors").count
    val totalUpdateWorks = spark.read.load(s"$updatePath/Works").count
    val totalUpdateEmployments = spark.read.load(s"$updatePath/Employments").count

    log.info("totalOriginalAuthors: {}", totalOriginalAuthors)
    log.info("totalOriginalWorks: {}", totalOriginalWorks)
    log.info("totalOriginalEmployments: {}", totalOriginalEmployments)
    log.info("totalUpdateAuthors: {}", totalUpdateAuthors)
    log.info("totalUpdateWorks: {}", totalUpdateWorks)
    log.info("totalUpdateEmployments: {}", totalUpdateEmployments)
    if (
      totalUpdateAuthors < totalOriginalAuthors || totalUpdateEmployments < totalOriginalEmployments || totalUpdateWorks < totalOriginalWorks
    )
      throw new RuntimeException("The updated graph contains fewer elements than the original one")

  }

  private def applyTableUpdate(spark: SparkSession, graphPath: String, updatePath: String, targetPath: String): Unit = {
    val orcidIDUpdate = spark.read.load(s"$updatePath/Authors").select("orcid")
    updateDataset(
      spark.read.load(s"$graphPath/Authors"),
      orcidIDUpdate,
      spark.read.load(s"$updatePath/Authors"),
      s"$targetPath/Authors"
    )
    updateDataset(
      spark.read.load(s"$graphPath/Employments"),
      orcidIDUpdate,
      spark.read.load(s"$updatePath/Employments"),
      s"$targetPath/Employments"
    )
    updateDataset(
      spark.read.load(s"$graphPath/Works"),
      orcidIDUpdate,
      spark.read.load(s"$updatePath/Works"),
      s"$targetPath/Works"
    )
  }

}

object SparkApplyUpdate {

  val log: Logger = LoggerFactory.getLogger(SparkApplyUpdate.getClass)

  def main(args: Array[String]): Unit = {

    new SparkApplyUpdate("/eu/dnetlib/dhp/collection/orcid/apply_orcid_table_parameter.json", args, log)
      .initialize()
      .run()

  }
}
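The update is applied per table with a left anti join: every record whose orcid appears in the update is dropped from the original table, and the updated rows are unioned back in. A minimal, self-contained sketch of that semantics on a toy table (column names follow the code above; the data and object name are invented for illustration):

import org.apache.spark.sql.SparkSession

// Minimal sketch of the leftanti-join semantics used by updateDataset above
// (toy in-memory data and a local master; the real job reads and writes parquet tables).
object ApplyUpdateSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("apply-update-sketch").getOrCreate()
    import spark.implicits._

    val graph  = Seq(("0000-0001", "Ada"), ("0000-0002", "Bob")).toDF("orcid", "name")
    val update = Seq(("0000-0002", "Robert"), ("0000-0003", "Eve")).toDF("orcid", "name")

    // 1) drop every graph row whose orcid is superseded by the update...
    val ids = update.select("orcid")
    val merged = graph
      .join(ids, graph("orcid") === ids("orcid"), "leftanti")
      .unionByName(update) // 2) ...then union the updated rows back in

    merged.orderBy("orcid").show() // 0000-0001/Ada, 0000-0002/Robert, 0000-0003/Eve

    // The real job additionally refuses to overwrite the graph if any merged table
    // shrank with respect to the original (see checkUpdate above).
    assert(merged.count() >= graph.count())

    spark.stop()
  }
}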
@@ -6,6 +6,7 @@ import org.apache.hadoop.io.Text
 import org.apache.spark.SparkContext
 import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
+import scala.collection.JavaConverters._
 
 class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Logger)
   extends AbstractScalaApplication(propertyPath, args, log: Logger) {
@@ -18,12 +19,16 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
     log.info("found parameters sourcePath: {}", sourcePath)
     val targetPath: String = parser.get("targetPath")
     log.info("found parameters targetPath: {}", targetPath)
-    extractORCIDTable(spark, sourcePath, targetPath)
-    extractORCIDEmploymentsTable(spark, sourcePath, targetPath)
-    extractORCIDWorksTable(spark, sourcePath, targetPath)
+    val fromUpdate = "true".equals(parser.get("fromUpdate"))
+    val sourceSummaryPath = if (fromUpdate) s"$sourcePath/summary*" else sourcePath
+    val sourceEmploymentsPath = if (fromUpdate) s"$sourcePath/employments*" else sourcePath
+    val sourceWorksPath = if (fromUpdate) s"$sourcePath/works*" else sourcePath
+    extractORCIDTable(spark, sourceSummaryPath, targetPath, fromUpdate)
+    extractORCIDEmploymentsTable(spark, sourceEmploymentsPath, targetPath, fromUpdate)
+    extractORCIDWorksTable(spark, sourceWorksPath, targetPath, fromUpdate)
   }
 
-  def extractORCIDTable(spark: SparkSession, sourcePath: String, targetPath: String): Unit = {
+  def extractORCIDTable(spark: SparkSession, sourcePath: String, targetPath: String, skipFilterByKey: Boolean): Unit = {
     val sc: SparkContext = spark.sparkContext
     import spark.implicits._
     val df = sc
@@ -32,8 +37,8 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
       .toDF
       .as[(String, String)]
     implicit val orcidAuthor: Encoder[Author] = Encoders.bean(classOf[Author])
-    // implicit val orcidPID:Encoder[Pid] = Encoders.bean(classOf[Pid])
-    df.filter(r => r._1.contains("summaries"))
+    val newDf = if (!skipFilterByKey) df.filter(r => r._1.contains("summaries")) else df
+    newDf
       .map { r =>
         val p = new OrcidParser
         p.parseSummary(r._2)
@@ -44,7 +49,12 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
       .save(s"$targetPath/Authors")
   }
 
-  def extractORCIDWorksTable(spark: SparkSession, sourcePath: String, targetPath: String): Unit = {
+  def extractORCIDWorksTable(
+    spark: SparkSession,
+    sourcePath: String,
+    targetPath: String,
+    skipFilterByKey: Boolean
+  ): Unit = {
     val sc: SparkContext = spark.sparkContext
     import spark.implicits._
     val df = sc
@@ -53,7 +63,19 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
       .toDF
       .as[(String, String)]
     implicit val orcidWorkAuthor: Encoder[Work] = Encoders.bean(classOf[Work])
-    implicit val orcidPID: Encoder[Pid] = Encoders.bean(classOf[Pid])
+
+    //We are in the case of parsing ORCID UPDATE
+    if (skipFilterByKey) {
+      df.flatMap { r =>
+        val p = new OrcidParser
+        p.parseWorks(r._2).asScala
+      }.filter(p => p != null)
+        .write
+        .mode(SaveMode.Overwrite)
+        .save(s"$targetPath/Works")
+    }
+    //We are in the case of parsing ORCID DUMP
+    else {
       df.filter(r => r._1.contains("works"))
         .map { r =>
           val p = new OrcidParser
@@ -64,8 +86,14 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
         .mode(SaveMode.Overwrite)
         .save(s"$targetPath/Works")
     }
+    }
 
-  def extractORCIDEmploymentsTable(spark: SparkSession, sourcePath: String, targetPath: String): Unit = {
+  def extractORCIDEmploymentsTable(
+    spark: SparkSession,
+    sourcePath: String,
+    targetPath: String,
+    skipFilterByKey: Boolean
+  ): Unit = {
     val sc: SparkContext = spark.sparkContext
     import spark.implicits._
     val df = sc
@@ -74,7 +102,17 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
       .toDF
       .as[(String, String)]
     implicit val orcidEmploymentAuthor: Encoder[Employment] = Encoders.bean(classOf[Employment])
-    implicit val orcidPID: Encoder[Pid] = Encoders.bean(classOf[Pid])
+    if (skipFilterByKey) {
+      df.flatMap { r =>
+        val p = new OrcidParser
+        p.parseEmployments(r._2).asScala
+      }.filter(p => p != null)
+        .write
+        .mode(SaveMode.Overwrite)
+        .save(s"$targetPath/Employments")
+    }
+    //We are in the case of parsing ORCID DUMP
+    else {
       df.filter(r => r._1.contains("employments"))
         .map { r =>
           val p = new OrcidParser
@@ -86,6 +124,7 @@ class SparkGenerateORCIDTable(propertyPath: String, args: Array[String], log: Lo
         .save(s"$targetPath/Employments")
     }
   }
+  }
 
 object SparkGenerateORCIDTable {
 
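The fromUpdate flag drives two decisions at once: a full ORCID dump is read whole and narrowed by sequence-file key (entries named summaries/works/employments inside one archive), while an update delivery is already split into summary*/works*/employments* files, so the key filter is skipped and the multi-record update XMLs are flatMapped instead. A hypothetical condensation of that branching, not part of the class, just to make the two code paths explicit:

// Where to read from, and whether the sequence-file keys still need
// filtering (dump) or not (update delivery).
def inputFor(sourcePath: String, glob: String, fromUpdate: Boolean): (String, Boolean) =
  if (fromUpdate) (s"$sourcePath/$glob*", true) // e.g. ("/data/orcid/works*", skipFilterByKey = true)
  else (sourcePath, false)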
@@ -78,10 +78,6 @@ public class PrepareAffiliationRelationsTest {
 			.getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror.json")
 			.getPath();
 
-		String pubmedAffiliationRelationsPath = getClass()
-			.getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror.json")
-			.getPath();
-
 		String outputPath = workingDir.toString() + "/actionSet";
 
 		PrepareAffiliationRelations
@@ -89,7 +85,8 @@ public class PrepareAffiliationRelationsTest {
 				new String[] {
 					"-isSparkSessionManaged", Boolean.FALSE.toString(),
 					"-crossrefInputPath", crossrefAffiliationRelationPath,
-					"-pubmedInputPath", pubmedAffiliationRelationsPath,
+					"-pubmedInputPath", crossrefAffiliationRelationPath,
+					"-openapcInputPath", crossrefAffiliationRelationPath,
 					"-outputPath", outputPath
 				});
 
@@ -106,7 +103,7 @@ public class PrepareAffiliationRelationsTest {
 		// );
 		// }
 		// count the number of relations
-		assertEquals(40, tmp.count());
+		assertEquals(60, tmp.count());
 
 		Dataset<Relation> dataset = spark.createDataset(tmp.rdd(), Encoders.bean(Relation.class));
 		dataset.createOrReplaceTempView("result");
@@ -117,7 +114,7 @@ public class PrepareAffiliationRelationsTest {
 		// verify that we have equal number of bi-directional relations
 		Assertions
 			.assertEquals(
-				20, execVerification
+				30, execVerification
 					.filter(
 						"relClass='" + ModelConstants.HAS_AUTHOR_INSTITUTION + "'")
 					.collectAsList()
@@ -125,7 +122,7 @@ public class PrepareAffiliationRelationsTest {
 
 		Assertions
 			.assertEquals(
-				20, execVerification
+				30, execVerification
 					.filter(
 						"relClass='" + ModelConstants.IS_AUTHOR_INSTITUTION_OF + "'")
 					.collectAsList()
@@ -0,0 +1,104 @@

package eu.dnetlib.dhp.actionmanager.fosnodoi;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.createunresolvedentities.GetFOSSparkJob;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareTest;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.ProduceTest;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.FOSDataModel;

/**
 * @author miriam.baglioni
 * @Date 13/02/23
 */
public class GetFosTest {

	private static final Logger log = LoggerFactory.getLogger(ProduceTest.class);

	private static Path workingDir;
	private static SparkSession spark;
	private static LocalFileSystem fs;
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(PrepareTest.class.getSimpleName());

		fs = FileSystem.getLocal(new Configuration());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(ProduceTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	@Disabled
	void test3() throws Exception {
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/actionmanager/fosnodoi/fosnodoi.csv")
			.getPath();

		final String outputPath = workingDir.toString() + "/fos.json";
		GetFOSSparkJob
			.main(
				new String[] {
					"--isSparkSessionManaged", Boolean.FALSE.toString(),
					"--sourcePath", sourcePath,
					"-outputPath", outputPath,
					"-delimiter", ","
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<FOSDataModel> tmp = sc
			.textFile(outputPath)
			.map(item -> OBJECT_MAPPER.readValue(item, FOSDataModel.class));

		tmp.foreach(t -> Assertions.assertTrue(t.getOaid() != null));
		tmp.foreach(t -> Assertions.assertTrue(t.getLevel1() != null));
		tmp.foreach(t -> Assertions.assertTrue(t.getLevel2() != null));
		tmp.foreach(t -> Assertions.assertTrue(t.getLevel3() != null));

		tmp.foreach(t -> System.out.println(new ObjectMapper().writeValueAsString(t)));

	}

}
@@ -0,0 +1,99 @@

package eu.dnetlib.dhp.actionmanager.fosnodoi;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareFOSSparkJob;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareSDGSparkJob;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.ProduceTest;
import eu.dnetlib.dhp.schema.oaf.Result;

public class PrepareTest {

	private static final Logger log = LoggerFactory.getLogger(ProduceTest.class);

	private static Path workingDir;
	private static SparkSession spark;
	private static LocalFileSystem fs;
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(PrepareTest.class.getSimpleName());

		fs = FileSystem.getLocal(new Configuration());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(ProduceTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void fosPrepareTest() throws Exception {
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/actionmanager/fosnodoi/fosnodoi.json")
			.getPath();

		PrepareFOSSparkJob
			.main(
				new String[] {
					"--isSparkSessionManaged", Boolean.FALSE.toString(),
					"--sourcePath", sourcePath,
					"-outputPath", workingDir.toString() + "/work",
					"-distributeDoi", Boolean.FALSE.toString()
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Result> tmp = sc
			.textFile(workingDir.toString() + "/work/fos")
			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));

		tmp.foreach(t -> System.out.println(new ObjectMapper().writeValueAsString(t)));

	}

}
@@ -76,7 +76,7 @@ public class CreateOpenCitationsASTest {
 
 		String inputPath = getClass()
 			.getResource(
-				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
+				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI/inputremap/jsonforas")
 			.getPath();
 
 		CreateActionSetSparkJob
@@ -84,8 +84,6 @@ public class CreateOpenCitationsASTest {
 				new String[] {
 					"-isSparkSessionManaged",
 					Boolean.FALSE.toString(),
-					"-shouldDuplicateRels",
-					Boolean.TRUE.toString(),
 					"-inputPath",
 					inputPath,
 					"-outputPath",
@@ -99,9 +97,10 @@ public class CreateOpenCitationsASTest {
 			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
 			.map(aa -> ((Relation) aa.getPayload()));
 
-		assertEquals(31, tmp.count());
+		Assertions.assertEquals(27, tmp.count());
+		tmp.foreach(r -> Assertions.assertEquals(1, r.getCollectedfrom().size()));
 
-		// tmp.foreach(r -> System.out.println(OBJECT_MAPPER.writeValueAsString(r)));
+		tmp.foreach(r -> System.out.println(OBJECT_MAPPER.writeValueAsString(r)));
 
 	}
 
@@ -0,0 +1,90 @@

package eu.dnetlib.dhp.actionmanager.opencitations;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;

/**
 * @author miriam.baglioni
 * @Date 07/03/24
 */
public class RemapTest {
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory
		.getLogger(RemapTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(RemapTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(RemapTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(RemapTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void testRemap() throws Exception {
		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI/inputremap")
			.getPath();

		MapOCIdsInPids
			.main(
				new String[] {
					"-isSparkSessionManged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/out/",
					"-nameNode", "input1;input2;input3;input4;input5"
				});

	}
}
@@ -0,0 +1,324 @@

package eu.dnetlib.dhp.actionmanager.transformativeagreement;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob;
import eu.dnetlib.dhp.actionmanager.opencitations.CreateOpenCitationsASTest;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;

/**
 * @author miriam.baglioni
 * @Date 13/02/24
 */
public class CreateTAActionSetTest {
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory
		.getLogger(CreateOpenCitationsASTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(CreateTAActionSetTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(CreateTAActionSetTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(CreateTAActionSetTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void createActionSet() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/transformativeagreement/facts.json")
			.getPath();

		eu.dnetlib.dhp.actionmanager.transformativeagreement.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet1"
				});

	}

	@Test
	void testNumberofRelations2() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet2"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet2", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		assertEquals(23, tmp.count());

		// tmp.foreach(r -> System.out.println(OBJECT_MAPPER.writeValueAsString(r)));

	}

	@Test
	void testRelationsCollectedFrom() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet3"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet3", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		tmp.foreach(r -> {
			assertEquals(ModelConstants.OPENOCITATIONS_NAME, r.getCollectedfrom().get(0).getValue());
			assertEquals(ModelConstants.OPENOCITATIONS_ID, r.getCollectedfrom().get(0).getKey());
		});

	}

	@Test
	void testRelationsDataInfo() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet4"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet4", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		tmp.foreach(r -> {
			assertEquals(false, r.getDataInfo().getInferred());
			assertEquals(false, r.getDataInfo().getDeletedbyinference());
			assertEquals("0.91", r.getDataInfo().getTrust());
			assertEquals(
				eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob.OPENCITATIONS_CLASSID,
				r.getDataInfo().getProvenanceaction().getClassid());
			assertEquals(
				eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob.OPENCITATIONS_CLASSNAME,
				r.getDataInfo().getProvenanceaction().getClassname());
			assertEquals(ModelConstants.DNET_PROVENANCE_ACTIONS, r.getDataInfo().getProvenanceaction().getSchemeid());
			assertEquals(ModelConstants.DNET_PROVENANCE_ACTIONS, r.getDataInfo().getProvenanceaction().getSchemename());
		});

	}

	@Test
	void testRelationsSemantics() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet5"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet5", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		tmp.foreach(r -> {
			assertEquals("citation", r.getSubRelType());
			assertEquals("resultResult", r.getRelType());
		});
		assertEquals(23, tmp.filter(r -> r.getRelClass().equals("Cites")).count());
		assertEquals(0, tmp.filter(r -> r.getRelClass().equals("IsCitedBy")).count());

	}

	@Test
	void testRelationsSourceTargetPrefix() throws Exception {

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		eu.dnetlib.dhp.actionmanager.opencitations.CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet6"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet6", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		tmp.foreach(r -> {
			assertEquals("50|doi_________::", r.getSource().substring(0, 17));
			assertEquals("50|doi_________::", r.getTarget().substring(0, 17));
		});

	}

	@Test
	void testRelationsSourceTargetCouple() throws Exception {
		final String doi1 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-015-3684-x"));
		final String doi2 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1111/j.1551-2916.2008.02408.x"));
		final String doi3 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-014-2114-9"));
		final String doi4 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/j.ceramint.2013.09.069"));
		final String doi5 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-009-9913-4"));
		final String doi6 = "50|doi_________::"
			+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/0038-1098(72)90370-5"));

		String inputPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/actionmanager/opencitations/COCI")
			.getPath();

		CreateActionSetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-inputPath",
					inputPath,
					"-outputPath",
					workingDir.toString() + "/actionSet7"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet7", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Relation) aa.getPayload()));

		JavaRDD<Relation> check = tmp.filter(r -> r.getSource().equals(doi1) || r.getTarget().equals(doi1));

		assertEquals(5, check.count());

		// check.foreach(r -> {
		// if (r.getSource().equals(doi2) || r.getSource().equals(doi3) || r.getSource().equals(doi4) ||
		// r.getSource().equals(doi5) || r.getSource().equals(doi6)) {
		// assertEquals(ModelConstants.IS_CITED_BY, r.getRelClass());
		// assertEquals(doi1, r.getTarget());
		// }
		// });

		assertEquals(5, check.filter(r -> r.getSource().equals(doi1)).count());
		check.filter(r -> r.getSource().equals(doi1)).foreach(r -> assertEquals(ModelConstants.CITES, r.getRelClass()));

	}
}
@@ -2,6 +2,7 @@
 package eu.dnetlib.dhp.collection.orcid;
 
 import java.io.IOException;
+import java.net.URI;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
@@ -9,7 +10,12 @@ import java.util.Objects;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.spark.SparkContext;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.Encoders;
@@ -27,6 +33,7 @@ import com.ximpleware.XPathParseException;
 
 import eu.dnetlib.dhp.collection.orcid.model.Author;
 import eu.dnetlib.dhp.collection.orcid.model.ORCIDItem;
+import eu.dnetlib.dhp.collection.orcid.model.Work;
 import eu.dnetlib.dhp.parser.utility.VtdException;
 
 public class DownloadORCIDTest {
@@ -82,6 +89,34 @@ public class DownloadORCIDTest {
 		});
 	}
 
+	@Test
+	public void testParsingOrcidUpdateEmployments() throws Exception {
+		final String xml = IOUtils
+			.toString(
+				Objects
+					.requireNonNull(
+						getClass().getResourceAsStream("/eu/dnetlib/dhp/collection/orcid/update_employments.xml")));
+
+		final OrcidParser parser = new OrcidParser();
+		final ObjectMapper mapper = new ObjectMapper();
+		System.out.println(mapper.writeValueAsString(parser.parseEmployments(xml)));
+	}
+
+	@Test
+	public void testParsingOrcidUpdateWorks() throws Exception {
+		final String xml = IOUtils
+			.toString(
+				Objects
+					.requireNonNull(
+						getClass().getResourceAsStream("/eu/dnetlib/dhp/collection/orcid/update_work.xml")));
+
+		final OrcidParser parser = new OrcidParser();
+		final List<Work> works = parser.parseWorks(xml);
+
+		final ObjectMapper mapper = new ObjectMapper();
+		System.out.println(mapper.writeValueAsString(works));
+	}
+
 	@Test
 	public void testParsingEmployments() throws Exception {
 
@@ -0,0 +1,38 @@

package eu.dnetlib.dhp.collection.plugin.base;

import java.io.Serializable;

public class BaseCollectionInfo implements Serializable {

	private static final long serialVersionUID = 5766333937429419647L;

	private String id;
	private String opendoarId;
	private String rorId;

	public String getId() {
		return this.id;
	}

	public void setId(final String id) {
		this.id = id;
	}

	public String getOpendoarId() {
		return this.opendoarId;
	}

	public void setOpendoarId(final String opendoarId) {
		this.opendoarId = opendoarId;
	}

	public String getRorId() {
		return this.rorId;
	}

	public void setRorId(final String rorId) {
		this.rorId = rorId;
	}

}
@@ -0,0 +1,184 @@

package eu.dnetlib.dhp.collection.plugin.base;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Node;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.common.aggregation.AggregatorReport;

@Disabled
public class BaseCollectorIteratorTest {

	@Test
	void testImportFile() throws Exception {

		long count = 0;

		final BaseCollectorIterator iterator = new BaseCollectorIterator("base-sample.tar", new AggregatorReport());

		final Map<String, Map<String, String>> collections = new HashMap<>();
		final Map<String, AtomicInteger> fields = new HashMap<>();
		final Set<String> types = new HashSet<>();

		while (iterator.hasNext()) {

			final Document record = DocumentHelper.parseText(iterator.next());

			count++;

			if ((count % 1000) == 0) {
				System.out.println("# Read records: " + count);
			}

			// System.out.println(record.asXML());

			for (final Object o : record.selectNodes("//*|//@*")) {
				final String path = ((Node) o).getPath();

				if (fields.containsKey(path)) {
					fields.get(path).incrementAndGet();
				} else {
					fields.put(path, new AtomicInteger(1));
				}

				if (o instanceof Element) {
					final Element n = (Element) o;

					if ("collection".equals(n.getName())) {
						final String collName = n.getText().trim();
						if (StringUtils.isNotBlank(collName) && !collections.containsKey(collName)) {
							final Map<String, String> collAttrs = new HashMap<>();
							for (final Object ao : n.attributes()) {
								collAttrs.put(((Attribute) ao).getName(), ((Attribute) ao).getValue());
							}
							collections.put(collName, collAttrs);
						}
					} else if ("type".equals(n.getName())) {
						types.add(n.getText().trim());
					}

				}
			}

		}

		final ObjectMapper mapper = new ObjectMapper();
		for (final Entry<String, Map<String, String>> e : collections.entrySet()) {
			System.out.println(e.getKey() + ": " + mapper.writeValueAsString(e.getValue()));
		}

		for (final Entry<String, AtomicInteger> e : fields.entrySet()) {
			System.out.println(e.getKey() + ": " + e.getValue().get());
		}

		System.out.println("TYPES: ");
		for (final String s : types) {
			System.out.println(s);
		}

		assertEquals(30000, count);
	}

	@Test
	public void testParquet() throws Exception {

		final String xml = IOUtils.toString(getClass().getResourceAsStream("record.xml"));

		final SparkSession spark = SparkSession.builder().master("local[*]").getOrCreate();

		final List<BaseRecordInfo> ls = new ArrayList<>();

		for (int i = 0; i < 10; i++) {
			ls.add(extractInfo(xml));
		}

		final JavaRDD<BaseRecordInfo> rdd = JavaSparkContext
			.fromSparkContext(spark.sparkContext())
			.parallelize(ls);

		final Dataset<BaseRecordInfo> df = spark
			.createDataset(rdd.rdd(), Encoders.bean(BaseRecordInfo.class));

		df.printSchema();

		df.show(false);
	}

	private BaseRecordInfo extractInfo(final String s) {
		try {
			final Document record = DocumentHelper.parseText(s);

			final BaseRecordInfo info = new BaseRecordInfo();

			final Set<String> paths = new LinkedHashSet<>();
			final Set<String> types = new LinkedHashSet<>();
			final List<BaseCollectionInfo> colls = new ArrayList<>();

			for (final Object o : record.selectNodes("//*|//@*")) {
				paths.add(((Node) o).getPath());

				if (o instanceof Element) {
					final Element n = (Element) o;

					final String nodeName = n.getName();

					if ("collection".equals(nodeName)) {
						final String collName = n.getText().trim();

						if (StringUtils.isNotBlank(collName)) {
							final BaseCollectionInfo coll = new BaseCollectionInfo();
							coll.setId(collName);
							coll.setOpendoarId(n.valueOf("@opendoar_id").trim());
							coll.setRorId(n.valueOf("@ror_id").trim());
							colls.add(coll);
						}
					} else if ("type".equals(nodeName)) {
						types.add("TYPE: " + n.getText().trim());
					} else if ("typenorm".equals(nodeName)) {
						types.add("TYPE_NORM: " + n.getText().trim());
					}
				}
			}

			info.setId(record.valueOf("//*[local-name() = 'header']/*[local-name() = 'identifier']").trim());
			info.getTypes().addAll(types);
			info.getPaths().addAll(paths);
			info.setCollections(colls);

			return info;
		} catch (final DocumentException e) {
			throw new RuntimeException(e);
		}
	}

}
@@ -0,0 +1,32 @@

package eu.dnetlib.dhp.collection.plugin.base;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Test;

class BaseCollectorPluginTest {

	@Test
	void testFilterXml() throws Exception {
		final String xml = IOUtils.toString(getClass().getResourceAsStream("record.xml"));

		final Set<String> validIds = new HashSet<>(Arrays.asList("opendoar____::1234", "opendoar____::4567"));
		final Set<String> validTypes = new HashSet<>(Arrays.asList("1", "121"));
		final Set<String> validTypes2 = new HashSet<>(Arrays.asList("1", "11"));

		assertTrue(BaseCollectorPlugin.filterXml(xml, validIds, validTypes));
		assertTrue(BaseCollectorPlugin.filterXml(xml, validIds, new HashSet<>()));

		assertFalse(BaseCollectorPlugin.filterXml(xml, new HashSet<>(), validTypes));
		assertFalse(BaseCollectorPlugin.filterXml(xml, validIds, validTypes2));

	}

}
@@ -0,0 +1,49 @@

package eu.dnetlib.dhp.collection.plugin.base;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class BaseRecordInfo implements Serializable {

	private static final long serialVersionUID = -8848232018350074593L;

	private String id;
	private List<BaseCollectionInfo> collections = new ArrayList<>();
	private List<String> paths = new ArrayList<>();
	private List<String> types = new ArrayList<>();

	public String getId() {
		return this.id;
	}

	public void setId(final String id) {
		this.id = id;
	}

	public List<String> getPaths() {
		return this.paths;
	}

	public void setPaths(final List<String> paths) {
		this.paths = paths;
	}

	public List<String> getTypes() {
		return this.types;
	}

	public void setTypes(final List<String> types) {
		this.types = types;
	}

	public List<BaseCollectionInfo> getCollections() {
		return this.collections;
	}

	public void setCollections(final List<BaseCollectionInfo> collections) {
		this.collections = collections;
	}

}
@ -0,0 +1,94 @@

package eu.dnetlib.dhp.collection.plugin.base;

import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.util.LongAccumulator;
import org.dom4j.io.SAXReader;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;

import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest;
import eu.dnetlib.dhp.aggregation.common.AggregationCounter;
import eu.dnetlib.dhp.schema.mdstore.MetadataRecord;
import eu.dnetlib.dhp.schema.mdstore.Provenance;
import eu.dnetlib.dhp.transformation.xslt.XSLTTransformationFunction;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;

// @Disabled
@ExtendWith(MockitoExtension.class)
public class BaseTransfomationTest extends AbstractVocabularyTest {

	private SparkConf sparkConf;

	@BeforeEach
	public void setUp() throws IOException, ISLookUpException {
		setUpVocabulary();

		this.sparkConf = new SparkConf();
		this.sparkConf.setMaster("local[*]");
		this.sparkConf.set("spark.driver.host", "localhost");
		this.sparkConf.set("spark.ui.enabled", "false");
	}

	@Test
	void testBase2ODF() throws Exception {
		final MetadataRecord mr = new MetadataRecord();
		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("record.xml")));

		final XSLTTransformationFunction tr = loadTransformationRule("xml/base2odf.transformationRule.xml");

		final MetadataRecord result = tr.call(mr);

		System.out.println(result.getBody());
	}

	@Test
	void testBase2OAF() throws Exception {
		final MetadataRecord mr = new MetadataRecord();
		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("record.xml")));

		final XSLTTransformationFunction tr = loadTransformationRule("xml/base2oaf.transformationRule.xml");

		final MetadataRecord result = tr.call(mr);

		System.out.println(result.getBody());
	}

	@Test
	void testBase2ODF_wrong_date() throws Exception {
		final MetadataRecord mr = new MetadataRecord();
		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("record_wrong_1.xml")));

		final XSLTTransformationFunction tr = loadTransformationRule("xml/base2oaf.transformationRule.xml");

		assertThrows(NullPointerException.class, () -> {
			final MetadataRecord result = tr.call(mr);
			System.out.println(result.getBody());
		});
	}

	private XSLTTransformationFunction loadTransformationRule(final String path) throws Exception {
		final String xslt = new SAXReader()
			.read(this.getClass().getResourceAsStream(path))
			.selectSingleNode("//CODE/*")
			.asXML();

		final LongAccumulator la = new LongAccumulator();

		return new XSLTTransformationFunction(new AggregationCounter(la, la, la), xslt, 0, this.vocabularies);
	}

}
@ -9,6 +9,7 @@ import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import eu.dnetlib.dhp.common.collection.CollectorException;
 import eu.dnetlib.dhp.common.collection.HttpClientParams;
 
 /**
@ -37,7 +38,7 @@ public class RestIteratorTest {
 
 	@Disabled
 	@Test
-	public void test() {
+	public void test() throws CollectorException {
 
 		HttpClientParams clientParams = new HttpClientParams();
 
@ -0,0 +1,48 @@

package eu.dnetlib.dhp.collection.plugin.utils;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class JsonUtilsTest {

	static private String wrapped(String xml) {
		return "<?xml version=\"1.0\" encoding=\"UTF-8\"?><recordWrap>" + xml + "</recordWrap>";
	}

	@Test
	void keyStartWithDigit() {
		assertEquals(
			wrapped("<m_100><n_200v>null</n_200v></m_100>"),
			JsonUtils.convertToXML("{\"100\" : {\"200v\" : null}}"));
	}

	@Test
	void keyStartWithSpecialchars() {
		assertEquals(
			wrapped("<_parent><_nest1><_nest2>null</_nest2></_nest1></_parent>"),
			JsonUtils.convertToXML("{\" parent\" : {\"-nest1\" : {\".nest2\" : null}}}"));
	}

	@Test
	void encodeArray() {
		assertEquals(
			wrapped("<_parent.child>1</_parent.child><_parent.child>2</_parent.child>"),
			JsonUtils.convertToXML("{\" parent.child\":[1, 2]}"));
	}

	@Test
	void arrayOfObjects() {
		assertEquals(
			wrapped("<parent><id>1</id></parent><parent><id>2</id></parent>"),
			JsonUtils.convertToXML("{\"parent\": [{\"id\": 1}, {\"id\": 2}]}"));
	}

	@Test
	void removeControlCharacters() {
		assertEquals(
			wrapped("<m_100><n_200v>Test</n_200v></m_100>"),
			JsonUtils.convertToXML("{\"100\" : {\"200v\" : \"\\u0000\\u000cTest\"}}"));
	}

}
@ -3,6 +3,7 @@ package eu.dnetlib.dhp.transformation;
 
 import static eu.dnetlib.dhp.common.Constants.MDSTORE_DATA_PATH;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import java.io.IOException;
 import java.nio.file.Path;
@ -279,6 +280,19 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		// TODO Create significant Assert
 	}
 
+	@Test
+	public void testInvalidXSLT() throws Exception {
+		final MetadataRecord mr = new MetadataRecord();
+
+		mr.setProvenance(new Provenance("openaire____::cnr_explora", "CNR ExploRA", "cnr_________"));
+		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_cnr_explora.xml")));
+
+		// We load the XSLT transformation rule from the classpath
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/invalid.xslt");
+
+		assertThrows(RuntimeException.class, () -> tr.call(mr));
+	}
+
 	private XSLTTransformationFunction loadTransformationRule(final String path) throws Exception {
 		final String trValue = IOUtils.toString(this.getClass().getResourceAsStream(path));
 		final LongAccumulator la = new LongAccumulator();
@ -0,0 +1,19 @@
DOI,OAID,level1,level2,level3,level4,score_for_L3,score_for_L4
N/A,78975075580c::e680668c98366c9cd6349afc62486a7f,03 medical and health sciences,0301 basic medicine,030104 developmental biology,N/A,0.5,0.0
N/A,78975075580c::e680668c98366c9cd6349afc62486a7f,03 medical and health sciences,0303 health sciences,030304 developmental biology,N/A,0.5,0.0
N/A,od______2806::a1da9d2678b12969a9ab5f50b5e71d0a,05 social sciences,0501 psychology and cognitive sciences,050109 social psychology,05010904 Group processes/Collective identity,0.5589094161987305,0.5166763067245483
N/A,od______2806::a1da9d2678b12969a9ab5f50b5e71d0a,05 social sciences,0501 psychology and cognitive sciences,050105 experimental psychology,05010501 Emotion/Affective science,0.44109055399894714,0.4833236634731293
N/A,doajarticles::76535d77fd2a5fe9810aefafffb8ef6c,05 social sciences,0502 economics and business,050203 business & management,05020302 Supply chain management/Business terms,0.5459638833999634,0.5460261106491089
N/A,doajarticles::76535d77fd2a5fe9810aefafffb8ef6c,05 social sciences,0502 economics and business,050211 marketing,05021102 Services marketing/Retailing,0.4540362060070038,0.4539738595485687
N/A,od_______156::a3a0119c6d9d3a66943f8da042e97a5e,01 natural sciences,0105 earth and related environmental sciences,010504 meteorology & atmospheric sciences,01050407 Geomagnetism/Ionosphere,0.5131047964096069,0.4990350902080536
N/A,od_______156::a3a0119c6d9d3a66943f8da042e97a5e,01 natural sciences,0105 earth and related environmental sciences,010502 geochemistry & geophysics,01050203 Seismology/Seismology measurement,0.4868951737880707,0.500964879989624
N/A,od______2806::4b9a664dd6b8b04204cb613e7bc9c873,03 medical and health sciences,0302 clinical medicine,030220 oncology & carcinogenesis,03022002 Medical imaging/Medical physics,0.5068133473396301,0.10231181626910052
N/A,od______2806::4b9a664dd6b8b04204cb613e7bc9c873,03 medical and health sciences,0302 clinical medicine,030204 cardiovascular system & hematology,N/A,0.49318668246269226,0.0
N/A,od______3341::ef754de29464abf9bc9b99664630ce74,03 medical and health sciences,0302 clinical medicine,030220 oncology & carcinogenesis,03022012 Oncology/Infectious causes of cancer,0.5,0.5
N/A,od______3341::ef754de29464abf9bc9b99664630ce74,03 medical and health sciences,0302 clinical medicine,030220 oncology & carcinogenesis,03022012 Oncology/Infectious causes of cancer,0.5,0.5
N/A,od______3978::6704dcced0fe3dd6fbf985dc2507f61c,03 medical and health sciences,0302 clinical medicine,030217 neurology & neurosurgery,03021702 Aging-associated diseases/Cognitive disorders,0.5134317874908447,0.09614889098529535
N/A,od______3978::6704dcced0fe3dd6fbf985dc2507f61c,03 medical and health sciences,0301 basic medicine,030104 developmental biology,N/A,0.48656824231147766,0.0
N/A,dedup_wf_001::b77264819800b90c0328c4d17eea5c1a,02 engineering and technology,0209 industrial biotechnology,020901 industrial engineering & automation,02090105 Control theory/Advanced driver assistance systems,0.5178514122962952,0.5198937654495239
N/A,dedup_wf_001::b77264819800b90c0328c4d17eea5c1a,02 engineering and technology,"0202 electrical engineering, electronic engineering, information engineering",020201 artificial intelligence & image processing,02020108 Fuzzy logic/Artificial neural networks/Computational neuroscience,0.48214852809906006,0.4801062345504761
N/A,od______2806::a938609e9f36ada6629a1bcc50c88230,03 medical and health sciences,0302 clinical medicine,030217 neurology & neurosurgery,03021708 Neurotrauma/Stroke,0.5014800429344177,0.5109656453132629
N/A,od______2806::a938609e9f36ada6629a1bcc50c88230,02 engineering and technology,0206 medical engineering,020601 biomedical engineering,02060102 Medical terminology/Patient,0.4985199570655823,0.4890343248844147
@ -0,0 +1,18 @@
{"doi":"n/a","oaid":"od______3341::ef754de29464abf9bc9b99664630ce74","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030220 oncology & carcinogenesis","level4":"03022012 Oncology/Infectious causes of cancer","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"n/a","oaid":"78975075580c::e680668c98366c9cd6349afc62486a7f","level1":"03 medical and health sciences","level2":"0301 basic medicine","level3":"030104 developmental biology","level4":"N/A","scoreL3":"0.5","scoreL4":"0.0"}
{"doi":"n/a","oaid":"od______3341::ef754de29464abf9bc9b99664630ce74","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030220 oncology & carcinogenesis","level4":"03022012 Oncology/Infectious causes of cancer","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"n/a","oaid":"78975075580c::e680668c98366c9cd6349afc62486a7f","level1":"03 medical and health sciences","level2":"0303 health sciences","level3":"030304 developmental biology","level4":"N/A","scoreL3":"0.5","scoreL4":"0.0"}
{"doi":"n/a","oaid":"od______3978::6704dcced0fe3dd6fbf985dc2507f61c","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030217 neurology & neurosurgery","level4":"03021702 Aging-associated diseases/Cognitive disorders","scoreL3":"0.5134317874908447","scoreL4":"0.09614889098529535"}
{"doi":"n/a","oaid":"od______2806::a1da9d2678b12969a9ab5f50b5e71d0a","level1":"05 social sciences","level2":"0501 psychology and cognitive sciences","level3":"050109 social psychology","level4":"05010904 Group processes/Collective identity","scoreL3":"0.5589094161987305","scoreL4":"0.5166763067245483"}
{"doi":"n/a","oaid":"od______3978::6704dcced0fe3dd6fbf985dc2507f61c","level1":"03 medical and health sciences","level2":"0301 basic medicine","level3":"030104 developmental biology","level4":"N/A","scoreL3":"0.48656824231147766","scoreL4":"0.0"}
{"doi":"n/a","oaid":"od______2806::a1da9d2678b12969a9ab5f50b5e71d0a","level1":"05 social sciences","level2":"0501 psychology and cognitive sciences","level3":"050105 experimental psychology","level4":"05010501 Emotion/Affective science","scoreL3":"0.44109055399894714","scoreL4":"0.4833236634731293"}
{"doi":"n/a","oaid":"dedup_wf_001::b77264819800b90c0328c4d17eea5c1a","level1":"02 engineering and technology","level2":"0209 industrial biotechnology","level3":"020901 industrial engineering & automation","level4":"02090105 Control theory/Advanced driver assistance systems","scoreL3":"0.5178514122962952","scoreL4":"0.5198937654495239"}
{"doi":"n/a","oaid":"doajarticles::76535d77fd2a5fe9810aefafffb8ef6c","level1":"05 social sciences","level2":"0502 economics and business","level3":"050203 business & management","level4":"05020302 Supply chain management/Business terms","scoreL3":"0.5459638833999634","scoreL4":"0.5460261106491089"}
{"doi":"n/a","oaid":"doajarticles::76535d77fd2a5fe9810aefafffb8ef6c","level1":"05 social sciences","level2":"0502 economics and business","level3":"050211 marketing","level4":"05021102 Services marketing/Retailing","scoreL3":"0.4540362060070038","scoreL4":"0.4539738595485687"}
{"doi":"n/a","oaid":"dedup_wf_001::b77264819800b90c0328c4d17eea5c1a","level1":"02 engineering and technology","level2":"0202 electrical engineering, electronic engineering, information engineering","level3":"020201 artificial intelligence & image processing","level4":"02020108 Fuzzy logic/Artificial neural networks/Computational neuroscience","scoreL3":"0.48214852809906006","scoreL4":"0.4801062345504761"}
{"doi":"n/a","oaid":"od_______156::a3a0119c6d9d3a66943f8da042e97a5e","level1":"01 natural sciences","level2":"0105 earth and related environmental sciences","level3":"010504 meteorology & atmospheric sciences","level4":"01050407 Geomagnetism/Ionosphere","scoreL3":"0.5131047964096069","scoreL4":"0.4990350902080536"}
{"doi":"n/a","oaid":"od______2806::a938609e9f36ada6629a1bcc50c88230","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030217 neurology & neurosurgery","level4":"03021708 Neurotrauma/Stroke","scoreL3":"0.5014800429344177","scoreL4":"0.5109656453132629"}
{"doi":"n/a","oaid":"od_______156::a3a0119c6d9d3a66943f8da042e97a5e","level1":"01 natural sciences","level2":"0105 earth and related environmental sciences","level3":"010502 geochemistry & geophysics","level4":"01050203 Seismology/Seismology measurement","scoreL3":"0.4868951737880707","scoreL4":"0.500964879989624"}
{"doi":"n/a","oaid":"od______2806::a938609e9f36ada6629a1bcc50c88230","level1":"02 engineering and technology","level2":"0206 medical engineering","level3":"020601 biomedical engineering","level4":"02060102 Medical terminology/Patient","scoreL3":"0.4985199570655823","scoreL4":"0.4890343248844147"}
{"doi":"n/a","oaid":"od______2806::4b9a664dd6b8b04204cb613e7bc9c873","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030220 oncology & carcinogenesis","level4":"03022002 Medical imaging/Medical physics","scoreL3":"0.5068133473396301","scoreL4":"0.10231181626910052"}
{"doi":"n/a","oaid":"od______2806::4b9a664dd6b8b04204cb613e7bc9c873","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030204 cardiovascular system & hematology","level4":"N/A","scoreL3":"0.49318668246269226","scoreL4":"0.0"}
File diff suppressed because one or more lines are too long
@ -0,0 +1,31 @@
{"cited":"br/061201599020", "citing":"br/06203041400","oci":"oci:06701327944-06504326071"}
{"cited":"br/061201599020","citing":"br/06502272390","oci":"oci:06502272390-061301355525"}
{"cited":"br/061201599020", "citing":"br/06120941789","oci":"oci:0670804699-067055659"}
{"cited":"br/06210273177","citing":"br/06203041400","oci":"oci:061502003994-062201281456"}
{"cited":"br/06210273177", "citing":"br/06502272390","oci":"oci:06502272390-0660806688"}
{"cited":"br/06210273177", "citing":"br/06120941789","oci":"oci:06502307119-0620223645"}
{"cited":"br/0660613430","citing":"br/06203041400","oci":"oci:061502004011-061902692285"}
{"cited":"br/0660613430", "citing":"br/06502272390","oci":"oci:0660549063-0610398792"}
{"cited":"br/0660613430", "citing":"br/06120941789","oci":"oci:06420189324-06301543046"}
{"cited":"br/062602732073","citing":"br/06203041400","oci":"oci:06380130275-061502004367"}
{"cited":"br/062602732073","citing":"br/06502272390","oci":"oci:062403449086-062501448395"}
{"cited":"br/062602732073","citing":"br/06120941789","oci":"oci:06420189328-061202007182"}
{"cited":"br/061103703697","citing":"br/06203041400","oci":"oci:062603906965-061701362658"}
{"cited":"br/061103703697", "citing":"br/06502272390","oci":"oci:0670294309-06104327031"}
{"cited":"br/061103703697","citing":"br/06120941789","oci":"oci:061702060228-061301712529"}
{"cited":"br/06230199640", "citing":"br/0670517081","oci":"oci:06901104174-06503692526"}
{"cited":"br/061703513967","citing":"br/061702310822","oci":"oci:061702310822-061703513967"}
{"cited":"br/062104002953","citing":"br/061702311472","oci":"oci:061702311472-062104002953"}
{"cited":"br/061101204417","citing":"br/062102701590","oci":"oci:062102701590-061101204417"}
{"cited":"br/062403787088","citing":"br/061401499173","oci":"oci:061401499173-062403787088"}
{"cited":"br/061203576338","citing":"br/06110279619","oci":"oci:06110279619-061203576338"}
{"cited":"br/061601962207","citing":"br/061502004018","oci":"oci:061502004018-061601962207"}
{"cited":"br/06101014588", "citing":"br/061502004027","oci":"oci:061502004027-06101014588"}
{"cited":"br/06704040804", "citing":"br/06220799044","oci":"oci:06220799044-06704040804"}
{"cited":"br/061401105151","citing":"br/061502004037","oci":"oci:061502004037-061401105151"}
{"cited":"br/0640821079", "citing":"br/061702311537","oci":"oci:061702311537-0640821079"}
{"cited":"br/06604165310", "citing":"br/062501970289","oci":"oci:062501970289-06604165310"}
{"cited":"br/061501351689","citing":"br/061203895786","oci":"oci:061203895786-061501351689"}
{"cited":"br/06202223692", "citing":"br/06110298832","oci":"oci:06110298832-06202223692"}
{"cited":"br/06104310727", "citing":"br/0660439086","oci":"oci:0660439086-06104310727"}
{"cited":"br/06150216214", "citing":"br/06340150329","oci":"oci:06340150329-06150216214"}
@ -0,0 +1,48 @@
omid,id
br/061201599020,doi:10.1142/s0219887817501687
br/06203041400,doi:10.1111/j.1523-5378.2005.00327.x pmid:16104945
br/06210273177,doi:10.1090/qam/20394
br/06502272390,pmid:32235596 doi:10.3390/nano10040644
br/0660613430,doi:10.1007/bf00470411
br/06120941789,doi:10.1098/rspa.2006.1747
br/062602732073,doi:10.1007/978-3-642-38844-6_25
br/06230199640,pmid:25088780 doi:10.1016/j.ymeth.2014.07.008
br/061103703697,pmid:2682767
br/0670517081,doi:10.1016/j.foodpol.2021.102189
br/06502310477,doi:10.1142/s0218127416500450
br/06520113284,doi:10.1109/cfasta57821.2023.10243367
br/062303652439,pmid:5962654 doi:10.1016/0020-708x(66)90001-9
br/06250691436,doi:10.1042/bst20150052 pmid:26009172
br/061201665577,doi:10.1097/00115550-200205000-00018
br/06503490336,pmid:34689254 doi:10.1007/s10072-021-05687-0
br/06220615942,pmid:25626134 doi:10.1016/j.jcis.2015.01.008
br/061103389243,doi:10.4324/9780203702819-10
br/062303011271,doi:10.1109/icassp.2011.5946250
br/061302926083,doi:10.4018/978-1-6684-3937-1.ch002
br/061402485360,doi:10.1109/iciict.2015.7396079
br/06410101083,doi:10.1016/j.autcon.2023.104828
br/062202243386,doi:10.1016/0001-8791(81)90022-1
br/06170421486,doi:10.1130/0016-7606(2003)115<0166:dsagmf>2.0.co;2
br/061201983865,doi:10.4324/9781315109008 isbn:9781315109008
br/061701697230,doi:10.1016/j.trd.2012.07.006
br/061201137111,doi:10.1109/access.2020.2971656
br/06120436283,pmid:2254430 doi:10.1128/jcm.28.11.2551-2554.1990
br/061903968916,doi:10.1111/j.1742-1241.1988.tb08627.x
br/06201583482,doi:10.1016/0016-5085(78)93139-6
br/06130338317,doi:10.2134/agronj1952.00021962004400080013x
br/062601538320,doi:10.1371/journal.pone.0270593 pmid:35789338
br/062401098626,pmid:22385804 doi:10.1016/j.talanta.2011.12.034
br/06190436492,doi:10.1039/c7dt01499f pmid:28644489
br/06202819247,doi:10.1007/978-3-319-45823-6_57
br/0648013560,doi:10.1080/14772000.2012.705356
br/0690214059,doi:10.2752/175630608x329217
br/06601640415,doi:10.1080/18128600508685647
br/061503394761,doi:10.1002/0471443395.img018
br/061702861849,pmid:31203682 doi:10.1080/10428194.2019.1627538
br/06450133713,doi:10.1093/acprof:oso/9780199670888.003.0008
br/0628074892,doi:10.1097/hnp.0000000000000597
br/061601032219,doi:10.1002/bdm.2102
br/06602079930,doi:10.1101/2020.08.25.267500
br/0604192147,doi:10.11501/3307395
br/061101933800,doi:10.1142/s0217732398002242
br/06504184118,pmid:10091417
@ -0,0 +1,27 @@
{"oci":"oci:06701327944-06504326071","citing":"16104945","citing_pid":"pmid","cited":"10.1142/s0219887817501687","cited_pid":"doi"}
{"oci":"oci:06701327944-06504326071","citing":"10.1111/j.1523-5378.2005.00327.x","citing_pid":"doi","cited":"10.1142/s0219887817501687","cited_pid":"doi"}
{"oci":"oci:06502272390-061301355525","citing":"10.3390/nano10040644","citing_pid":"doi","cited":"10.1142/s0219887817501687","cited_pid":"doi"}
{"oci":"oci:06502272390-061301355525","citing":"32235596","citing_pid":"pmid","cited":"10.1142/s0219887817501687","cited_pid":"doi"}
{"oci":"oci:0670804699-067055659","citing":"10.1098/rspa.2006.1747","citing_pid":"doi","cited":"10.1142/s0219887817501687","cited_pid":"doi"}
{"oci":"oci:061502003994-062201281456","citing":"16104945","citing_pid":"pmid","cited":"10.1090/qam/20394","cited_pid":"doi"}
{"oci":"oci:061502003994-062201281456","citing":"10.1111/j.1523-5378.2005.00327.x","citing_pid":"doi","cited":"10.1090/qam/20394","cited_pid":"doi"}
{"oci":"oci:06502272390-0660806688","citing":"10.3390/nano10040644","citing_pid":"doi","cited":"10.1090/qam/20394","cited_pid":"doi"}
{"oci":"oci:06502272390-0660806688","citing":"32235596","citing_pid":"pmid","cited":"10.1090/qam/20394","cited_pid":"doi"}
{"oci":"oci:06502307119-0620223645","citing":"10.1098/rspa.2006.1747","citing_pid":"doi","cited":"10.1090/qam/20394","cited_pid":"doi"}
{"oci":"oci:061502004011-061902692285","citing":"16104945","citing_pid":"pmid","cited":"10.1007/bf00470411","cited_pid":"doi"}
{"oci":"oci:061502004011-061902692285","citing":"10.1111/j.1523-5378.2005.00327.x","citing_pid":"doi","cited":"10.1007/bf00470411","cited_pid":"doi"}
{"oci":"oci:0660549063-0610398792","citing":"10.3390/nano10040644","citing_pid":"doi","cited":"10.1007/bf00470411","cited_pid":"doi"}
{"oci":"oci:0660549063-0610398792","citing":"32235596","citing_pid":"pmid","cited":"10.1007/bf00470411","cited_pid":"doi"}
{"oci":"oci:06420189324-06301543046","citing":"10.1098/rspa.2006.1747","citing_pid":"doi","cited":"10.1007/bf00470411","cited_pid":"doi"}
{"oci":"oci:06380130275-061502004367","citing":"16104945","citing_pid":"pmid","cited":"10.1007/978-3-642-38844-6_25","cited_pid":"doi"}
{"oci":"oci:06380130275-061502004367","citing":"10.1111/j.1523-5378.2005.00327.x","citing_pid":"doi","cited":"10.1007/978-3-642-38844-6_25","cited_pid":"doi"}
{"oci":"oci:062403449086-062501448395","citing":"10.3390/nano10040644","citing_pid":"doi","cited":"10.1007/978-3-642-38844-6_25","cited_pid":"doi"}
{"oci":"oci:062403449086-062501448395","citing":"32235596","citing_pid":"pmid","cited":"10.1007/978-3-642-38844-6_25","cited_pid":"doi"}
{"oci":"oci:06420189328-061202007182","citing":"10.1098/rspa.2006.1747","citing_pid":"doi","cited":"10.1007/978-3-642-38844-6_25","cited_pid":"doi"}
{"oci":"oci:062603906965-061701362658","citing":"16104945","citing_pid":"pmid","cited":"2682767","cited_pid":"pmid"}
{"oci":"oci:062603906965-061701362658","citing":"10.1111/j.1523-5378.2005.00327.x","citing_pid":"doi","cited":"2682767","cited_pid":"pmid"}
{"oci":"oci:0670294309-06104327031","citing":"10.3390/nano10040644","citing_pid":"doi","cited":"2682767","cited_pid":"pmid"}
{"oci":"oci:0670294309-06104327031","citing":"32235596","citing_pid":"pmid","cited":"2682767","cited_pid":"pmid"}
{"oci":"oci:061702060228-061301712529","citing":"10.1098/rspa.2006.1747","citing_pid":"doi","cited":"2682767","cited_pid":"pmid"}
{"oci":"oci:06901104174-06503692526","citing":"10.1016/j.foodpol.2021.102189","citing_pid":"doi","cited":"10.1016/j.ymeth.2014.07.008","cited_pid":"doi"}
{"oci":"oci:06901104174-06503692526","citing":"10.1016/j.foodpol.2021.102189","citing_pid":"doi","cited":"25088780","cited_pid":"pmid"}
File diff suppressed because it is too large
@ -0,0 +1,966 @@
<record:record path="/0000-0001-6816-8350" xmlns:internal="http://www.orcid.org/ns/internal" xmlns:education="http://www.orcid.org/ns/education" xmlns:distinction="http://www.orcid.org/ns/distinction" xmlns:deprecated="http://www.orcid.org/ns/deprecated" xmlns:other-name="http://www.orcid.org/ns/other-name" xmlns:membership="http://www.orcid.org/ns/membership" xmlns:error="http://www.orcid.org/ns/error" xmlns:common="http://www.orcid.org/ns/common" xmlns:record="http://www.orcid.org/ns/record" xmlns:personal-details="http://www.orcid.org/ns/personal-details" xmlns:keyword="http://www.orcid.org/ns/keyword" xmlns:email="http://www.orcid.org/ns/email" xmlns:external-identifier="http://www.orcid.org/ns/external-identifier" xmlns:funding="http://www.orcid.org/ns/funding" xmlns:preferences="http://www.orcid.org/ns/preferences" xmlns:address="http://www.orcid.org/ns/address" xmlns:invited-position="http://www.orcid.org/ns/invited-position" xmlns:work="http://www.orcid.org/ns/work" xmlns:history="http://www.orcid.org/ns/history" xmlns:employment="http://www.orcid.org/ns/employment" xmlns:qualification="http://www.orcid.org/ns/qualification" xmlns:service="http://www.orcid.org/ns/service" xmlns:person="http://www.orcid.org/ns/person" xmlns:activities="http://www.orcid.org/ns/activities" xmlns:researcher-url="http://www.orcid.org/ns/researcher-url" xmlns:peer-review="http://www.orcid.org/ns/peer-review" xmlns:bulk="http://www.orcid.org/ns/bulk" xmlns:research-resource="http://www.orcid.org/ns/research-resource">
    <common:orcid-identifier>
        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
        <common:path>0000-0001-6816-8350</common:path>
        <common:host>orcid.org</common:host>
    </common:orcid-identifier>
    <preferences:preferences>
        <preferences:locale>en</preferences:locale>
    </preferences:preferences>
    <history:history>
        <history:creation-method>Direct</history:creation-method>
        <history:submission-date>2016-01-06T05:08:45.720Z</history:submission-date>
        <common:last-modified-date>2024-01-02T20:07:05.186Z</common:last-modified-date>
        <history:claimed>true</history:claimed>
        <history:verified-email>true</history:verified-email>
        <history:verified-primary-email>true</history:verified-primary-email>
    </history:history>
    <person:person path="/0000-0001-6816-8350/person">
        <common:last-modified-date>2023-12-02T13:32:05.269Z</common:last-modified-date>
        <other-name:other-names path="/0000-0001-6816-8350/other-names"/>
        <researcher-url:researcher-urls path="/0000-0001-6816-8350/researcher-urls">
            <common:last-modified-date>2016-02-09T09:18:18.417Z</common:last-modified-date>
            <researcher-url:researcher-url put-code="633431" visibility="public" path="/0000-0001-6816-8350/researcher-urls/633431" display-index="0">
                <common:created-date>2016-02-09T09:18:18.416Z</common:created-date>
                <common:last-modified-date>2016-02-09T09:18:18.417Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <researcher-url:url-name>Dr Michael Muchiri</researcher-url:url-name>
                <researcher-url:url>http://www.rmit.edu.au/contact/staff-contacts/academic-staff/m/muchiri-dr-michael</researcher-url:url>
            </researcher-url:researcher-url>
        </researcher-url:researcher-urls>
        <email:emails path="/0000-0001-6816-8350/email"/>
        <address:addresses path="/0000-0001-6816-8350/address">
            <common:last-modified-date>2023-12-02T13:32:05.269Z</common:last-modified-date>
            <address:address put-code="897528" visibility="public" path="/0000-0001-6816-8350/address/897528" display-index="2">
                <common:created-date>2018-02-13T02:32:04.094Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:32:05.269Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <address:country>AU</address:country>
            </address:address>
            <address:address put-code="3191142" visibility="public" path="/0000-0001-6816-8350/address/3191142" display-index="1">
                <common:created-date>2023-12-02T13:32:05.260Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:32:05.260Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <address:country>SA</address:country>
            </address:address>
        </address:addresses>
        <keyword:keywords path="/0000-0001-6816-8350/keywords">
            <common:last-modified-date>2023-12-02T13:31:16.269Z</common:last-modified-date>
            <keyword:keyword put-code="368304" visibility="public" path="/0000-0001-6816-8350/keywords/368304" display-index="4">
                <common:created-date>2016-02-09T09:16:44.001Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:31:16.269Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <keyword:content>Organizational Behavior</keyword:content>
            </keyword:keyword>
            <keyword:keyword put-code="368303" visibility="public" path="/0000-0001-6816-8350/keywords/368303" display-index="3">
                <common:created-date>2016-02-09T09:16:27.374Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:31:16.269Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <keyword:content>Organizational Leadership</keyword:content>
            </keyword:keyword>
            <keyword:keyword put-code="368306" visibility="public" path="/0000-0001-6816-8350/keywords/368306" display-index="2">
                <common:created-date>2016-02-09T09:17:08.998Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:31:16.269Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <keyword:content>Organizational performance</keyword:content>
            </keyword:keyword>
            <keyword:keyword put-code="3590814" visibility="public" path="/0000-0001-6816-8350/keywords/3590814" display-index="1">
                <common:created-date>2023-12-02T13:31:16.259Z</common:created-date>
                <common:last-modified-date>2023-12-02T13:31:16.259Z</common:last-modified-date>
                <common:source>
                    <common:source-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-orcid>
                </common:source>
                <keyword:content>Thriving at work</keyword:content>
            </keyword:keyword>
        </keyword:keywords>
        <external-identifier:external-identifiers path="/0000-0001-6816-8350/external-identifiers">
            <common:last-modified-date>2018-04-10T00:49:55.386Z</common:last-modified-date>
            <external-identifier:external-identifier put-code="998076" visibility="public" path="/0000-0001-6816-8350/external-identifiers/998076" display-index="0">
                <common:created-date>2018-04-10T00:49:55.385Z</common:created-date>
                <common:last-modified-date>2018-04-10T00:49:55.386Z</common:last-modified-date>
                <common:source>
                    <common:source-client-id>
                        <common:uri>https://orcid.org/client/0000-0003-1377-5676</common:uri>
                        <common:path>0000-0003-1377-5676</common:path>
                        <common:host>orcid.org</common:host>
                    </common:source-client-id>
                    <common:source-name>ResearcherID</common:source-name>
                    <common:assertion-origin-orcid>
                        <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                        <common:path>0000-0001-6816-8350</common:path>
                        <common:host>orcid.org</common:host>
                    </common:assertion-origin-orcid>
                </common:source>
                <common:external-id-type>ResearcherID</common:external-id-type>
                <common:external-id-value>D-1929-2018</common:external-id-value>
                <common:external-id-url>http://www.researcherid.com/rid/D-1929-2018</common:external-id-url>
                <common:external-id-relationship>self</common:external-id-relationship>
            </external-identifier:external-identifier>
        </external-identifier:external-identifiers>
    </person:person>
    <activities:activities-summary path="/0000-0001-6816-8350/activities">
        <common:last-modified-date>2023-12-02T13:28:26.051Z</common:last-modified-date>
        <activities:distinctions path="/0000-0001-6816-8350/distinctions"/>
        <activities:educations path="/0000-0001-6816-8350/educations">
            <common:last-modified-date>2018-02-13T02:33:38.225Z</common:last-modified-date>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T06:55:21.838Z</common:last-modified-date>
                <common:external-ids/>
                <education:education-summary put-code="1549986" display-index="0" path="/0000-0001-6816-8350/education/1549986" visibility="public">
                    <common:created-date>2016-02-09T06:54:39.199Z</common:created-date>
                    <common:last-modified-date>2016-02-09T06:55:21.838Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Management</common:department-name>
                    <common:role-title>PhD</common:role-title>
                    <common:start-date>
                        <common:year>2021</common:year>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2022</common:year>
                        <common:month>02</common:month>
                        <common:day>12</common:day>
                    </common:end-date>
                    <common:start-date>
                        <common:year>2003</common:year>
                        <common:month>03</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2007</common:year>
                        <common:month>03</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>University of New England</common:name>
                        <common:address>
                            <common:city>Armidale</common:city>
                            <common:region>NSW</common:region>
                            <common:country>AU</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>1319</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </education:education-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2018-02-13T02:33:38.225Z</common:last-modified-date>
                <common:external-ids/>
                <education:education-summary put-code="1549990" display-index="0" path="/0000-0001-6816-8350/education/1549990" visibility="public">
                    <common:created-date>2016-02-09T06:57:04.181Z</common:created-date>
                    <common:last-modified-date>2018-02-13T02:33:38.225Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Psychology</common:department-name>
                    <common:role-title>Master of Science (Industrial and Organizational) Psychology</common:role-title>
                    <common:start-date>
                        <common:year>1998</common:year>
                        <common:month>01</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2000</common:year>
                        <common:month>01</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Universitas Gadjah Mada</common:name>
                        <common:address>
                            <common:city>Yogyakarta</common:city>
                            <common:region>Daerah Istimewa Yogyakart</common:region>
                            <common:country>ID</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>59166</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </education:education-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2018-02-13T02:33:35.821Z</common:last-modified-date>
                <common:external-ids/>
                <education:education-summary put-code="1549998" display-index="0" path="/0000-0001-6816-8350/education/1549998" visibility="public">
                    <common:created-date>2016-02-09T06:58:59.869Z</common:created-date>
                    <common:last-modified-date>2018-02-13T02:33:35.821Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Education</common:department-name>
                    <common:role-title>Bachelor of Education (Honors)</common:role-title>
                    <common:start-date>
                        <common:year>1988</common:year>
                        <common:month>03</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>1991</common:year>
                        <common:month>03</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Kenyatta University</common:name>
                        <common:address>
                            <common:city>Nairobi</common:city>
                            <common:region>Nairobi</common:region>
                            <common:country>KE</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>107864</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </education:education-summary>
            </activities:affiliation-group>
        </activities:educations>
        <activities:employments path="/0000-0001-6816-8350/employments">
            <common:last-modified-date>2023-12-02T13:28:26.051Z</common:last-modified-date>
            <activities:affiliation-group>
                <common:last-modified-date>2023-12-02T13:28:26.051Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="21884863" display-index="1" path="/0000-0001-6816-8350/employment/21884863" visibility="public">
                    <common:created-date>2023-12-02T13:28:26.051Z</common:created-date>
                    <common:last-modified-date>2023-12-02T13:28:26.051Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Management</common:department-name>
                    <common:role-title>Associate Professor in Management</common:role-title>
                    <common:start-date>
                        <common:year>2023</common:year>
                        <common:month>08</common:month>
                        <common:day>20</common:day>
                    </common:start-date>
                    <common:organization>
                        <common:name>Alfaisal University</common:name>
                        <common:address>
                            <common:city>Riyadh</common:city>
                            <common:country>SA</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>https://ror.org/00cdrtq48</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>ROR</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                    <common:url>https://faculty.alfaisal.edu/user/mmuchiri</common:url>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T07:00:06.052Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550002" display-index="0" path="/0000-0001-6816-8350/employment/1550002" visibility="public">
                    <common:created-date>2016-02-09T07:00:06.052Z</common:created-date>
                    <common:last-modified-date>2016-02-09T07:00:06.052Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Management</common:department-name>
                    <common:role-title>Senior Lecturer</common:role-title>
                    <common:start-date>
                        <common:year>2014</common:year>
                        <common:month>02</common:month>
                    </common:start-date>
                    <common:organization>
                        <common:name>RMIT University</common:name>
                        <common:address>
                            <common:city>Melbourne</common:city>
                            <common:region>VIC</common:region>
                            <common:country>AU</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>5376</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T07:01:08.398Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550007" display-index="0" path="/0000-0001-6816-8350/employment/1550007" visibility="public">
                    <common:created-date>2016-02-09T07:01:08.398Z</common:created-date>
                    <common:last-modified-date>2016-02-09T07:01:08.398Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Management</common:department-name>
                    <common:role-title>Senior Lecturer in Human Resource Management</common:role-title>
                    <common:start-date>
                        <common:year>2010</common:year>
                        <common:month>01</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2014</common:year>
                        <common:month>02</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Central Queensland University</common:name>
                        <common:address>
                            <common:city>Rockhampton</common:city>
                            <common:region>QLD</common:region>
                            <common:country>AU</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>273488</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T07:01:47.814Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550010" display-index="0" path="/0000-0001-6816-8350/employment/1550010" visibility="public">
                    <common:created-date>2016-02-09T07:01:47.814Z</common:created-date>
                    <common:last-modified-date>2016-02-09T07:01:47.814Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Management</common:department-name>
                    <common:role-title>Lecturer in Management</common:role-title>
                    <common:start-date>
                        <common:year>2007</common:year>
                        <common:month>01</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2010</common:year>
                        <common:month>01</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Central Queensland University</common:name>
                        <common:address>
                            <common:city>Rockhampton</common:city>
                            <common:region>QLD</common:region>
                            <common:country>AU</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>273488</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2018-02-13T02:33:13.213Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550017" display-index="0" path="/0000-0001-6816-8350/employment/1550017" visibility="public">
                    <common:created-date>2016-02-09T07:03:42.180Z</common:created-date>
                    <common:last-modified-date>2018-02-13T02:33:13.213Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Human Resource Development Division</common:department-name>
                    <common:role-title>Chief Human Resource Development Officer</common:role-title>
                    <common:start-date>
                        <common:year>2005</common:year>
                        <common:month>01</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2007</common:year>
                        <common:month>01</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Government of Kenya Directorate of Personnel Management</common:name>
                        <common:address>
                            <common:city>Nairobi</common:city>
                            <common:region>Nairobi</common:region>
                            <common:country>KE</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>360256</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T07:05:02.300Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550020" display-index="0" path="/0000-0001-6816-8350/employment/1550020" visibility="public">
                    <common:created-date>2016-02-09T07:05:02.300Z</common:created-date>
                    <common:last-modified-date>2016-02-09T07:05:02.300Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Human Resource Development Division</common:department-name>
                    <common:role-title>Human Resource Development Officer</common:role-title>
                    <common:start-date>
                        <common:year>2001</common:year>
                        <common:month>01</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2005</common:year>
                        <common:month>01</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Government of Kenya Directorate of Personnel Management</common:name>
                        <common:address>
                            <common:city>Nairobi</common:city>
                            <common:region>Nairobi</common:region>
                            <common:country>KE</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>360256</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
            <activities:affiliation-group>
                <common:last-modified-date>2016-02-09T07:36:52.398Z</common:last-modified-date>
                <common:external-ids/>
                <employment:employment-summary put-code="1550050" display-index="0" path="/0000-0001-6816-8350/employment/1550050" visibility="public">
                    <common:created-date>2016-02-09T07:36:52.398Z</common:created-date>
                    <common:last-modified-date>2016-02-09T07:36:52.398Z</common:last-modified-date>
                    <common:source>
                        <common:source-orcid>
                            <common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
                            <common:path>0000-0001-6816-8350</common:path>
                            <common:host>orcid.org</common:host>
                        </common:source-orcid>
                    </common:source>
                    <common:department-name>Public Sector Management Technical Assistance Project</common:department-name>
                    <common:role-title>Project Coordinator for Development Learning Centre</common:role-title>
                    <common:start-date>
                        <common:year>2002</common:year>
                        <common:month>08</common:month>
                    </common:start-date>
                    <common:end-date>
                        <common:year>2003</common:year>
                        <common:month>03</common:month>
                    </common:end-date>
                    <common:organization>
                        <common:name>Government of Kenya Directorate of Personnel Management</common:name>
                        <common:address>
                            <common:city>Nairobi</common:city>
                            <common:region>Nairobi</common:region>
                            <common:country>KE</common:country>
                        </common:address>
                        <common:disambiguated-organization>
                            <common:disambiguated-organization-identifier>360256</common:disambiguated-organization-identifier>
                            <common:disambiguation-source>RINGGOLD</common:disambiguation-source>
                        </common:disambiguated-organization>
                    </common:organization>
                </employment:employment-summary>
            </activities:affiliation-group>
        </activities:employments>
|
||||||
|
<activities:fundings path="/0000-0001-6816-8350/fundings">
|
||||||
|
<common:last-modified-date>2016-02-09T09:05:27.100Z</common:last-modified-date>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2016-02-09T09:05:27.100Z</common:last-modified-date>
|
||||||
|
<common:external-ids/>
|
||||||
|
<funding:funding-summary put-code="150520" path="/0000-0001-6816-8350/funding/150520" visibility="public" display-index="6">
|
||||||
|
<common:created-date>2016-02-09T09:05:27.100Z</common:created-date>
|
||||||
|
<common:last-modified-date>2016-02-09T09:05:27.100Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-orcid>
|
||||||
|
<common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
|
||||||
|
<common:path>0000-0001-6816-8350</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-orcid>
|
||||||
|
</common:source>
|
||||||
|
<funding:title>
|
||||||
|
<common:title>A cross-country examination of Employee Wellbeing, Leadership, High Performance Work Systems and Innovative Behaviours</common:title>
|
||||||
|
</funding:title>
|
||||||
|
<funding:type>grant</funding:type>
|
||||||
|
<common:start-date>
|
||||||
|
<common:year>2016</common:year>
|
||||||
|
<common:month>01</common:month>
|
||||||
|
</common:start-date>
|
||||||
|
<common:end-date>
|
||||||
|
<common:year>2016</common:year>
|
||||||
|
<common:month>12</common:month>
|
||||||
|
</common:end-date>
|
||||||
|
<common:organization>
|
||||||
|
<common:name>RMIT University</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>VIC</common:city>
|
||||||
|
<common:region>VIC</common:region>
|
||||||
|
<common:country>AU</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>http://dx.doi.org/10.13039/501100001780</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>FUNDREF</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</common:organization>
|
||||||
|
</funding:funding-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2016-02-09T09:03:51.641Z</common:last-modified-date>
|
||||||
|
<common:external-ids/>
|
||||||
|
<funding:funding-summary put-code="150518" path="/0000-0001-6816-8350/funding/150518" visibility="public" display-index="5">
|
||||||
|
<common:created-date>2016-02-09T09:03:51.641Z</common:created-date>
|
||||||
|
<common:last-modified-date>2016-02-09T09:03:51.641Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-orcid>
|
||||||
|
<common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
|
||||||
|
<common:path>0000-0001-6816-8350</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-orcid>
|
||||||
|
</common:source>
|
||||||
|
<funding:title>
|
||||||
|
<common:title>Leading Safe and Thriving Organisations: An Investigation of the Relationships between Leadership, Thriving Behaviour, Authentic Followership and Safety Climate in an Australian Multinational Enterprise</common:title>
|
||||||
|
</funding:title>
|
||||||
|
<funding:type>grant</funding:type>
|
||||||
|
<common:start-date>
|
||||||
|
<common:year>2015</common:year>
|
||||||
|
<common:month>01</common:month>
|
||||||
|
</common:start-date>
|
||||||
|
<common:end-date>
|
||||||
|
<common:year>2015</common:year>
|
||||||
|
<common:month>12</common:month>
|
||||||
|
</common:end-date>
|
||||||
|
<common:organization>
|
||||||
|
<common:name>RMIT University</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>VIC</common:city>
|
||||||
|
<common:region>VIC</common:region>
|
||||||
|
<common:country>AU</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>http://dx.doi.org/10.13039/501100001780</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>FUNDREF</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</common:organization>
|
||||||
|
</funding:funding-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2016-02-09T09:02:28.297Z</common:last-modified-date>
|
||||||
|
<common:external-ids/>
|
||||||
|
<funding:funding-summary put-code="150516" path="/0000-0001-6816-8350/funding/150516" visibility="public" display-index="4">
|
||||||
|
<common:created-date>2016-02-09T09:02:28.297Z</common:created-date>
|
||||||
|
<common:last-modified-date>2016-02-09T09:02:28.297Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-orcid>
|
||||||
|
<common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
|
||||||
|
<common:path>0000-0001-6816-8350</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-orcid>
|
||||||
|
</common:source>
|
||||||
|
<funding:title>
|
||||||
|
<common:title>A multilevel, cross-country examination of leadership, followership and innovative behaviours in Australia and Indonesia. </common:title>
|
||||||
|
</funding:title>
|
||||||
|
<funding:type>grant</funding:type>
|
||||||
|
<common:start-date>
|
||||||
|
<common:year>2015</common:year>
|
||||||
|
<common:month>01</common:month>
|
||||||
|
</common:start-date>
|
||||||
|
<common:end-date>
|
||||||
|
<common:year>2015</common:year>
|
||||||
|
<common:month>12</common:month>
|
||||||
|
</common:end-date>
|
||||||
|
<common:organization>
|
||||||
|
<common:name>RMIT University</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>VIC</common:city>
|
||||||
|
<common:region>VIC</common:region>
|
||||||
|
<common:country>AU</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>http://dx.doi.org/10.13039/501100001780</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>FUNDREF</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</common:organization>
|
||||||
|
</funding:funding-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2016-02-09T09:00:51.749Z</common:last-modified-date>
|
||||||
|
<common:external-ids/>
|
||||||
|
<funding:funding-summary put-code="150514" path="/0000-0001-6816-8350/funding/150514" visibility="public" display-index="3">
|
||||||
|
<common:created-date>2016-02-09T09:00:51.749Z</common:created-date>
|
||||||
|
<common:last-modified-date>2016-02-09T09:00:51.749Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-orcid>
|
||||||
|
<common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
|
||||||
|
<common:path>0000-0001-6816-8350</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-orcid>
|
||||||
|
</common:source>
|
||||||
|
<funding:title>
|
||||||
|
<common:title>Workplace safety and positive leadership: Exploring relationships between leader behaviours, organisational climate, safety climate, safety citizenship behaviours and innovative behaviours within city councils in Victoria </common:title>
|
||||||
|
</funding:title>
|
||||||
|
<funding:type>grant</funding:type>
|
||||||
|
<common:start-date>
|
||||||
|
<common:year>2014</common:year>
|
||||||
|
<common:month>01</common:month>
|
||||||
|
</common:start-date>
|
||||||
|
<common:end-date>
|
||||||
|
<common:year>2014</common:year>
|
||||||
|
<common:month>12</common:month>
|
||||||
|
</common:end-date>
|
||||||
|
<common:organization>
|
||||||
|
<common:name>RMIT University</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>VIC</common:city>
|
||||||
|
<common:region>VIC</common:region>
|
||||||
|
<common:country>AU</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>http://dx.doi.org/10.13039/501100001780</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>FUNDREF</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</common:organization>
|
||||||
|
</funding:funding-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2016-02-09T07:46:44.919Z</common:last-modified-date>
|
||||||
|
<common:external-ids/>
|
||||||
|
<funding:funding-summary put-code="150485" path="/0000-0001-6816-8350/funding/150485" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2016-02-09T07:46:44.919Z</common:created-date>
|
||||||
|
<common:last-modified-date>2016-02-09T07:46:44.919Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-orcid>
|
||||||
|
<common:uri>https://orcid.org/0000-0001-6816-8350</common:uri>
|
||||||
|
<common:path>0000-0001-6816-8350</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-orcid>
|
||||||
|
</common:source>
|
||||||
|
<funding:title>
|
||||||
|
<common:title>Sustainable Business Model for Central Queensland Regional Information Systems.</common:title>
|
||||||
|
</funding:title>
|
||||||
|
<funding:type>grant</funding:type>
|
||||||
|
<common:start-date>
|
||||||
|
<common:year>2008</common:year>
|
||||||
|
<common:month>01</common:month>
|
||||||
|
</common:start-date>
|
||||||
|
<common:end-date>
|
||||||
|
<common:year>2008</common:year>
|
||||||
|
<common:month>12</common:month>
|
||||||
|
</common:end-date>
|
||||||
|
<common:organization>
|
||||||
|
<common:name>Department of Local Government, Planning, Sport and Recreation, Queensland, Australia </common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>Rockhampton</common:city>
|
||||||
|
<common:region>Central Queensland</common:region>
|
||||||
|
<common:country>AU</common:country>
|
||||||
|
</common:address>
|
||||||
|
</common:organization>
|
||||||
|
</funding:funding-summary>
|
||||||
|
</activities:group>
|
||||||
|
</activities:fundings>
|
||||||
|
<activities:invited-positions path="/0000-0001-6816-8350/invited-positions"/>
|
||||||
|
<activities:memberships path="/0000-0001-6816-8350/memberships"/>
|
||||||
|
<activities:peer-reviews path="/0000-0001-6816-8350/peer-reviews">
|
||||||
|
<common:last-modified-date>2023-05-31T05:53:44.542Z</common:last-modified-date>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2023-05-31T05:53:44.542Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>peer-review</common:external-id-type>
|
||||||
|
<common:external-id-value>issn:0167-4544</common:external-id-value>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<activities:peer-review-group>
|
||||||
|
<common:last-modified-date>2023-02-28T06:51:52.426Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>source-work-id</common:external-id-type>
|
||||||
|
<common:external-id-value>c9bdf086-cfee-4cd9-bcfb-268cc5423248</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">c9bdf086-cfee-4cd9-bcfb-268cc5423248</common:external-id-normalized>
|
||||||
|
<common:external-id-url></common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<peer-review:peer-review-summary put-code="8741329" path="/0000-0001-6816-8350/peer-review/8741329" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2023-02-28T06:51:52.426Z</common:created-date>
|
||||||
|
<common:last-modified-date>2023-02-28T06:51:52.426Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-client-id>
|
||||||
|
<common:uri>https://orcid.org/client/APP-945VYTN20C7BZXYT</common:uri>
|
||||||
|
<common:path>APP-945VYTN20C7BZXYT</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-client-id>
|
||||||
|
<common:source-name>Springer Nature @ Editorial Manager</common:source-name>
|
||||||
|
</common:source>
|
||||||
|
<peer-review:reviewer-role>reviewer</peer-review:reviewer-role>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>source-work-id</common:external-id-type>
|
||||||
|
<common:external-id-value>c9bdf086-cfee-4cd9-bcfb-268cc5423248</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">c9bdf086-cfee-4cd9-bcfb-268cc5423248</common:external-id-normalized>
|
||||||
|
<common:external-id-url></common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<peer-review:review-type>review</peer-review:review-type>
|
||||||
|
<peer-review:completion-date>
|
||||||
|
<common:year>2023</common:year>
|
||||||
|
</peer-review:completion-date>
|
||||||
|
<peer-review:review-group-id>issn:0167-4544</peer-review:review-group-id>
|
||||||
|
<peer-review:convening-organization>
|
||||||
|
<common:name>Springer Nature</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>New York</common:city>
|
||||||
|
<common:country>US</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>grid.467660.5</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>GRID</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</peer-review:convening-organization>
|
||||||
|
</peer-review:peer-review-summary>
|
||||||
|
</activities:peer-review-group>
|
||||||
|
<activities:peer-review-group>
|
||||||
|
<common:last-modified-date>2023-05-31T05:53:44.542Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>source-work-id</common:external-id-type>
|
||||||
|
<common:external-id-value>c442840b-5807-459d-802a-303d8ba4e25e</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">c442840b-5807-459d-802a-303d8ba4e25e</common:external-id-normalized>
|
||||||
|
<common:external-id-url></common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<peer-review:peer-review-summary put-code="9680570" path="/0000-0001-6816-8350/peer-review/9680570" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2023-05-31T05:53:44.542Z</common:created-date>
|
||||||
|
<common:last-modified-date>2023-05-31T05:53:44.542Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-client-id>
|
||||||
|
<common:uri>https://orcid.org/client/APP-945VYTN20C7BZXYT</common:uri>
|
||||||
|
<common:path>APP-945VYTN20C7BZXYT</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-client-id>
|
||||||
|
<common:source-name>Springer Nature @ Editorial Manager</common:source-name>
|
||||||
|
</common:source>
|
||||||
|
<peer-review:reviewer-role>reviewer</peer-review:reviewer-role>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>source-work-id</common:external-id-type>
|
||||||
|
<common:external-id-value>c442840b-5807-459d-802a-303d8ba4e25e</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">c442840b-5807-459d-802a-303d8ba4e25e</common:external-id-normalized>
|
||||||
|
<common:external-id-url></common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<peer-review:review-type>review</peer-review:review-type>
|
||||||
|
<peer-review:completion-date>
|
||||||
|
<common:year>2023</common:year>
|
||||||
|
</peer-review:completion-date>
|
||||||
|
<peer-review:review-group-id>issn:0167-4544</peer-review:review-group-id>
|
||||||
|
<peer-review:convening-organization>
|
||||||
|
<common:name>Springer Nature</common:name>
|
||||||
|
<common:address>
|
||||||
|
<common:city>New York</common:city>
|
||||||
|
<common:country>US</common:country>
|
||||||
|
</common:address>
|
||||||
|
<common:disambiguated-organization>
|
||||||
|
<common:disambiguated-organization-identifier>grid.467660.5</common:disambiguated-organization-identifier>
|
||||||
|
<common:disambiguation-source>GRID</common:disambiguation-source>
|
||||||
|
</common:disambiguated-organization>
|
||||||
|
</peer-review:convening-organization>
|
||||||
|
</peer-review:peer-review-summary>
|
||||||
|
</activities:peer-review-group>
|
||||||
|
</activities:group>
|
||||||
|
</activities:peer-reviews>
|
||||||
|
<activities:qualifications path="/0000-0001-6816-8350/qualifications"/>
|
||||||
|
<activities:research-resources path="/0000-0001-6816-8350/research-resources"/>
|
||||||
|
<activities:services path="/0000-0001-6816-8350/services"/>
|
||||||
|
<activities:works path="/0000-0001-6816-8350/works">
|
||||||
|
<common:last-modified-date>2023-06-02T20:12:00.338Z</common:last-modified-date>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2023-06-02T20:12:00.338Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.4337/9781800881945.00020</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.4337/9781800881945.00020</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.4337/9781800881945.00020</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<work:work-summary put-code="134891279" path="/0000-0001-6816-8350/work/134891279" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2023-05-11T21:05:54.188Z</common:created-date>
|
||||||
|
<common:last-modified-date>2023-06-02T20:12:00.338Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-client-id>
|
||||||
|
<common:uri>https://orcid.org/client/0000-0001-9884-1913</common:uri>
|
||||||
|
<common:path>0000-0001-9884-1913</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-client-id>
|
||||||
|
<common:source-name>Crossref</common:source-name>
|
||||||
|
</common:source>
|
||||||
|
<work:title>
|
||||||
|
<common:title>Ethical leadership as workplace innovation and enabler for employee commitment and innovative work behaviours in Vietnam</common:title>
|
||||||
|
</work:title>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.4337/9781800881945.00020</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.4337/9781800881945.00020</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.4337/9781800881945.00020</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<common:url>https://doi.org/10.4337/9781800881945.00020</common:url>
|
||||||
|
<work:type>book-chapter</work:type>
|
||||||
|
<common:publication-date>
|
||||||
|
<common:year>2023</common:year>
|
||||||
|
<common:month>05</common:month>
|
||||||
|
<common:day>26</common:day>
|
||||||
|
</common:publication-date>
|
||||||
|
</work:work-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2023-03-01T11:30:31.972Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.1007/s10551-022-05081-6</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.1007/s10551-022-05081-6</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.1007/s10551-022-05081-6</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<work:work-summary put-code="110048777" path="/0000-0001-6816-8350/work/110048777" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2022-03-18T03:36:55.927Z</common:created-date>
|
||||||
|
<common:last-modified-date>2023-03-01T11:30:31.972Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-client-id>
|
||||||
|
<common:uri>https://orcid.org/client/0000-0001-9884-1913</common:uri>
|
||||||
|
<common:path>0000-0001-9884-1913</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-client-id>
|
||||||
|
<common:source-name>Crossref</common:source-name>
|
||||||
|
</common:source>
|
||||||
|
<work:title>
|
||||||
|
<common:title>Unethical Leadership: Review, Synthesis and Directions for Future Research</common:title>
|
||||||
|
</work:title>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.1007/s10551-022-05081-6</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.1007/s10551-022-05081-6</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.1007/s10551-022-05081-6</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<common:url>https://doi.org/10.1007/s10551-022-05081-6</common:url>
|
||||||
|
<work:type>journal-article</work:type>
|
||||||
|
<common:publication-date>
|
||||||
|
<common:year>2023</common:year>
|
||||||
|
<common:month>03</common:month>
|
||||||
|
</common:publication-date>
|
||||||
|
<work:journal-title>Journal of Business Ethics</work:journal-title>
|
||||||
|
</work:work-summary>
|
||||||
|
</activities:group>
|
||||||
|
<activities:group>
|
||||||
|
<common:last-modified-date>2022-05-28T18:16:16.575Z</common:last-modified-date>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.1017/jmo.2019.33</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.1017/jmo.2019.33</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.1017/jmo.2019.33</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<work:work-summary put-code="57272180" path="/0000-0001-6816-8350/work/57272180" visibility="public" display-index="0">
|
||||||
|
<common:created-date>2019-05-10T07:23:14.608Z</common:created-date>
|
||||||
|
<common:last-modified-date>2022-05-28T18:16:16.575Z</common:last-modified-date>
|
||||||
|
<common:source>
|
||||||
|
<common:source-client-id>
|
||||||
|
<common:uri>https://orcid.org/client/0000-0001-9884-1913</common:uri>
|
||||||
|
<common:path>0000-0001-9884-1913</common:path>
|
||||||
|
<common:host>orcid.org</common:host>
|
||||||
|
</common:source-client-id>
|
||||||
|
<common:source-name>Crossref</common:source-name>
|
||||||
|
</common:source>
|
||||||
|
<work:title>
|
||||||
|
<common:title>And now for something completely different: Reframing social processes of leadership theory using positive organisational behaviour</common:title>
|
||||||
|
</work:title>
|
||||||
|
<common:external-ids>
|
||||||
|
<common:external-id>
|
||||||
|
<common:external-id-type>doi</common:external-id-type>
|
||||||
|
<common:external-id-value>10.1017/jmo.2019.33</common:external-id-value>
|
||||||
|
<common:external-id-normalized transient="true">10.1017/jmo.2019.33</common:external-id-normalized>
|
||||||
|
<common:external-id-url>https://doi.org/10.1017/jmo.2019.33</common:external-id-url>
|
||||||
|
<common:external-id-relationship>self</common:external-id-relationship>
|
||||||
|
</common:external-id>
|
||||||
|
</common:external-ids>
|
||||||
|
<common:url>https://doi.org/10.1017/jmo.2019.33</common:url>
|
||||||
|
<work:type>journal-article</work:type>
|
||||||
|
<common:publication-date>
|
||||||
|
<common:year>2019</common:year>
|
||||||
|
<common:month>05</common:month>
|
||||||
|
<common:day>09</common:day>
|
||||||
|
</common:publication-date>
<work:journal-title>Journal of Management &amp; Organization</work:journal-title>
</work:work-summary>
</activities:group>
</activities:works>
</activities:activities-summary>
</record:record>