master #11
@@ -6,7 +6,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-build</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>

 <artifactId>dhp-build-assembly-resources</artifactId>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-build</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>

 <artifactId>dhp-build-properties-maven-plugin</artifactId>

@@ -1,13 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

 <modelVersion>4.0.0</modelVersion>

 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-code-style</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>

 <packaging>jar</packaging>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-build</artifactId>
 <packaging>pom</packaging>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 <relativePath>../</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 <relativePath>../</relativePath>
 </parent>
@@ -49,6 +49,9 @@ public class ModelConstants {
 public static final String HAS_PARTICIPANT = "hasParticipant";
 public static final String IS_PARTICIPANT = "isParticipant";

+public static final String UNKNOWN = "UNKNOWN";
+public static final String NOT_AVAILABLE = "not available";
+
 public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
 PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
 DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);

@@ -41,6 +41,12 @@ public class Relation extends Oaf {
 */
 private String target;

+/**
+ * List of relation specific properties. Values include 'similarityLevel', indicating the similarity score between a
+ * pair of publications.
+ */
+private List<KeyValue> properties = new ArrayList<>();
+
 public String getRelType() {
 return relType;
 }

@@ -81,6 +87,14 @@ public class Relation extends Oaf {
 this.target = target;
 }

+public List<KeyValue> getProperties() {
+return properties;
+}
+
+public void setProperties(List<KeyValue> properties) {
+this.properties = properties;
+}
+
 public void mergeFrom(final Relation r) {

 checkArgument(Objects.equals(getSource(), r.getSource()), "source ids must be equal");
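Reviewer note: the new properties list gives Relation a place for pair-level metadata such as the documented similarityLevel. A small usage sketch (values invented; assumes the schema's KeyValue setters):

    Relation rel = new Relation();
    KeyValue similarity = new KeyValue();
    similarity.setKey("similarityLevel");
    similarity.setValue("0.95");
    rel.getProperties().add(similarity);

It may also be worth merging the two properties lists inside mergeFrom, which the visible part of the hunk does not do.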
@@ -1,11 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-workflows</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-actionmanager</artifactId>

@@ -84,8 +84,11 @@ public class MigrateActionSet {
 final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
 log
 .info(
-"paths to process:\n{}",
-sourcePaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
+"paths to process:\n{}", sourcePaths
+.stream()
+.map(p -> p.toString())
+.collect(Collectors.joining("\n")));

 for (Path source : sourcePaths) {

 if (!sourceFS.exists(source)) {

@@ -119,9 +122,8 @@ public class MigrateActionSet {
 }
 }

-props
-.setProperty(
-TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
+final String targetPathsCsv = targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(","));
+props.setProperty(TARGET_PATHS, targetPathsCsv);
 File file = new File(System.getProperty("oozie.action.output.properties"));

 try (OutputStream os = new FileOutputStream(file)) {
@@ -1,12 +1,10 @@

 package eu.dnetlib.dhp.actionmanager.migration;

 import static eu.dnetlib.data.proto.KindProtos.Kind.entity;
 import static eu.dnetlib.data.proto.KindProtos.Kind.relation;
-import static eu.dnetlib.data.proto.TypeProtos.*;
+import static eu.dnetlib.data.proto.TypeProtos.Type.*;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;

 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;

@@ -21,10 +19,6 @@ import eu.dnetlib.dhp.schema.oaf.*;

 public class ProtoConverter implements Serializable {

-public static final String UNKNOWN = "UNKNOWN";
-public static final String NOT_AVAILABLE = "not available";
-public static final String DNET_ACCESS_MODES = "dnet:access_modes";
-
 public static Oaf convert(OafProtos.Oaf oaf) {
 try {
 switch (oaf.getKind()) {

@@ -64,6 +58,7 @@ public class ProtoConverter implements Serializable {
 case result:
 final Result r = convertResult(oaf);
 r.setInstance(convertInstances(oaf));
+r.setExternalReference(convertExternalRefs(oaf));
 return r;
 case project:
 return convertProject(oaf);

@@ -94,13 +89,44 @@ public class ProtoConverter implements Serializable {
 i.setHostedby(mapKV(ri.getHostedby()));
 i.setInstancetype(mapQualifier(ri.getInstancetype()));
 i.setLicense(mapStringField(ri.getLicense()));
-i.setUrl(ri.getUrlList());
+i
+.setUrl(
+ri.getUrlList() != null ? ri
+.getUrlList()
+.stream()
+.distinct()
+.collect(Collectors.toCollection(ArrayList::new)) : null);
 i.setRefereed(mapStringField(ri.getRefereed()));
 i.setProcessingchargeamount(mapStringField(ri.getProcessingchargeamount()));
 i.setProcessingchargecurrency(mapStringField(ri.getProcessingchargecurrency()));
 return i;
 }

+private static List<ExternalReference> convertExternalRefs(OafProtos.Oaf oaf) {
+ResultProtos.Result r = oaf.getEntity().getResult();
+if (r.getExternalReferenceCount() > 0) {
+return r
+.getExternalReferenceList()
+.stream()
+.map(e -> convertExtRef(e))
+.collect(Collectors.toList());
+}
+return Lists.newArrayList();
+}
+
+private static ExternalReference convertExtRef(ResultProtos.Result.ExternalReference e) {
+ExternalReference ex = new ExternalReference();
+ex.setUrl(e.getUrl());
+ex.setSitename(e.getSitename());
+ex.setRefidentifier(e.getRefidentifier());
+ex.setQuery(e.getQuery());
+ex.setQualifier(mapQualifier(e.getQualifier()));
+ex.setLabel(e.getLabel());
+ex.setDescription(e.getDescription());
+ex.setDataInfo(ex.getDataInfo());
+return ex;
+}
+
 private static Organization convertOrganization(OafProtos.Oaf oaf) {
 final OrganizationProtos.Organization.Metadata m = oaf.getEntity().getOrganization().getMetadata();
 final Organization org = setOaf(new Organization(), oaf);
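Reviewer note: in convertExtRef, ex.setDataInfo(ex.getDataInfo()) assigns the field to itself, so the converted ExternalReference never receives a dataInfo. The intent was presumably to map it from the protobuf message, e.g. ex.setDataInfo(mapDataInfo(e.getDataInfo())), assuming a mapDataInfo helper in the same vein as the mapQualifier/mapStringField calls above (helper name hypothetical).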
@@ -4,7 +4,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-workflows</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-aggregation</artifactId>

@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
@@ -0,0 +1,104 @@
+
+package eu.dnetlib.dhp.broker.model;
+
+import java.util.Map;
+
+public class Event {
+
+private String eventId;
+
+private String producerId;
+
+private String topic;
+
+private String payload;
+
+private Long creationDate;
+
+private Long expiryDate;
+
+private boolean instantMessage;
+
+private Map<String, Object> map;
+
+public Event() {
+}
+
+public Event(final String producerId, final String eventId, final String topic, final String payload,
+final Long creationDate, final Long expiryDate,
+final boolean instantMessage,
+final Map<String, Object> map) {
+this.producerId = producerId;
+this.eventId = eventId;
+this.topic = topic;
+this.payload = payload;
+this.creationDate = creationDate;
+this.expiryDate = expiryDate;
+this.instantMessage = instantMessage;
+this.map = map;
+}
+
+public String getProducerId() {
+return this.producerId;
+}
+
+public void setProducerId(final String producerId) {
+this.producerId = producerId;
+}
+
+public String getEventId() {
+return this.eventId;
+}
+
+public void setEventId(final String eventId) {
+this.eventId = eventId;
+}
+
+public String getTopic() {
+return this.topic;
+}
+
+public void setTopic(final String topic) {
+this.topic = topic;
+}
+
+public String getPayload() {
+return this.payload;
+}
+
+public void setPayload(final String payload) {
+this.payload = payload;
+}
+
+public Long getCreationDate() {
+return this.creationDate;
+}
+
+public void setCreationDate(final Long creationDate) {
+this.creationDate = creationDate;
+}
+
+public Long getExpiryDate() {
+return this.expiryDate;
+}
+
+public void setExpiryDate(final Long expiryDate) {
+this.expiryDate = expiryDate;
+}
+
+public boolean isInstantMessage() {
+return this.instantMessage;
+}
+
+public void setInstantMessage(final boolean instantMessage) {
+this.instantMessage = instantMessage;
+}
+
+public Map<String, Object> getMap() {
+return this.map;
+}
+
+public void setMap(final Map<String, Object> map) {
+this.map = map;
+}
+}
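Reviewer note: Event is a plain bean with a no-arg constructor and full getter/setter pairs, so it round-trips through the Jackson ObjectMapper already used elsewhere in this PR. A stand-alone sketch (class name and values invented; assumes Event is imported from eu.dnetlib.dhp.broker.model):

    import java.util.HashMap;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class EventJsonDemo {
        public static void main(String[] args) throws Exception {
            Event event = new Event("OpenAIRE", "event-123456", "ENRICH/MISSING/PID", "{}",
                System.currentTimeMillis(), System.currentTimeMillis() + 1000L, false, new HashMap<>());
            String json = new ObjectMapper().writeValueAsString(event);
            Event back = new ObjectMapper().readValue(json, Event.class);
            System.out.println(back.getTopic()); // ENRICH/MISSING/PID
        }
    }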
@@ -0,0 +1,140 @@
+
+package eu.dnetlib.dhp.broker.model;
+
+import java.text.ParseException;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.time.DateUtils;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
+import eu.dnetlib.dhp.schema.oaf.Author;
+import eu.dnetlib.dhp.schema.oaf.KeyValue;
+import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+
+public class EventFactory {
+
+private final static String PRODUCER_ID = "OpenAIRE";
+
+private static final int TTH_DAYS = 365;
+
+private final static String[] DATE_PATTERNS = {
+"yyyy-MM-dd"
+};
+
+public static Event newBrokerEvent(final Result source, final Result target, final UpdateInfo<?> updateInfo) {
+
+final long now = new Date().getTime();
+
+final Event res = new Event();
+
+final Map<String, Object> map = createMapFromResult(target, source, updateInfo);
+
+final String payload = createPayload(target, updateInfo);
+
+final String eventId = calculateEventId(
+updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
+
+res.setEventId(eventId);
+res.setProducerId(PRODUCER_ID);
+res.setPayload(payload);
+res.setMap(map);
+res.setTopic(updateInfo.getTopic());
+res.setCreationDate(now);
+res.setExpiryDate(calculateExpiryDate(now));
+res.setInstantMessage(false);
+return res;
+}
+
+private static String createPayload(final Result result, final UpdateInfo<?> updateInfo) {
+final OpenAireEventPayload payload = new OpenAireEventPayload();
+// TODO
+
+updateInfo.compileHighlight(payload);
+
+return payload.toJSON();
+}
+
+private static Map<String, Object> createMapFromResult(final Result oaf, final Result source,
+final UpdateInfo<?> updateInfo) {
+final Map<String, Object> map = new HashMap<>();
+
+final List<KeyValue> collectedFrom = oaf.getCollectedfrom();
+if (collectedFrom.size() == 1) {
+map.put("target_datasource_id", collectedFrom.get(0).getKey());
+map.put("target_datasource_name", collectedFrom.get(0).getValue());
+}
+
+final List<String> ids = oaf.getOriginalId();
+if (ids.size() > 0) {
+map.put("target_publication_id", ids.get(0));
+}
+
+final List<StructuredProperty> titles = oaf.getTitle();
+if (titles.size() > 0) {
+map.put("target_publication_title", titles.get(0));
+}
+
+final long date = parseDateTolong(oaf.getDateofacceptance().getValue());
+if (date > 0) {
+map.put("target_dateofacceptance", date);
+}
+
+final List<StructuredProperty> subjects = oaf.getSubject();
+if (subjects.size() > 0) {
+map
+.put(
+"target_publication_subject_list",
+subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
+}
+
+final List<Author> authors = oaf.getAuthor();
+if (authors.size() > 0) {
+map
+.put(
+"target_publication_author_list",
+authors.stream().map(Author::getFullname).collect(Collectors.toList()));
+}
+
+// PROVENANCE INFO
+map.put("trust", updateInfo.getTrust());
+final List<KeyValue> sourceCollectedFrom = source.getCollectedfrom();
+if (sourceCollectedFrom.size() == 1) {
+map.put("provenance_datasource_id", sourceCollectedFrom.get(0).getKey());
+map.put("provenance_datasource_name", sourceCollectedFrom.get(0).getValue());
+}
+map.put("provenance_publication_id_list", source.getOriginalId());
+
+return map;
+}
+
+private static String calculateEventId(final String topic, final String publicationId, final String value) {
+return "event-"
++ DigestUtils.md5Hex(topic).substring(0, 6) + "-"
++ DigestUtils.md5Hex(publicationId).substring(0, 8) + "-"
++ DigestUtils.md5Hex(value).substring(0, 8);
+}
+
+private static long calculateExpiryDate(final long now) {
+return now + TTH_DAYS * 24 * 60 * 60 * 1000;
+}
+
+private static long parseDateTolong(final String date) {
+if (StringUtils.isBlank(date)) {
+return -1;
+}
+try {
+return DateUtils.parseDate(date, DATE_PATTERNS).getTime();
+} catch (final ParseException e) {
+return -1;
+}
+}
+
+}
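Reviewer note: in calculateExpiryDate the product TTH_DAYS * 24 * 60 * 60 * 1000 is evaluated in int arithmetic and overflows (365 days in milliseconds is about 3.15e10, well above Integer.MAX_VALUE), so events would expire roughly 17 days after creation instead of one year. A minimal stand-alone sketch of the fix (class name and main harness are illustrative only):

    import java.util.concurrent.TimeUnit;

    public class ExpiryDateSketch {

        private static final int TTH_DAYS = 365;

        // Widen to long before multiplying; TimeUnit keeps the intent obvious.
        static long calculateExpiryDate(final long now) {
            return now + TimeUnit.DAYS.toMillis(TTH_DAYS);
        }

        public static void main(final String[] args) {
            // The int expression silently wraps: 31536000000 becomes 1471228928.
            System.out.println(365 * 24 * 60 * 60 * 1000);
            System.out.println(calculateExpiryDate(0L)); // 31536000000
        }
    }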
@@ -0,0 +1,112 @@
+
+package eu.dnetlib.dhp.broker.oa;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.broker.model.Event;
+import eu.dnetlib.dhp.broker.model.EventFactory;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAbstract;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAuthorOrcid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingOpenAccess;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingProject;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPublicationDate;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingSubject;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMoreOpenAccess;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMorePid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMoreSubject;
+import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class GenerateEventsApplication {
+
+private static final Logger log = LoggerFactory.getLogger(GenerateEventsApplication.class);
+
+private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+public static void main(final String[] args) throws Exception {
+final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+IOUtils
+.toString(
+GenerateEventsApplication.class
+.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
+parser.parseArgument(args);
+
+final Boolean isSparkSessionManaged = Optional
+.ofNullable(parser.get("isSparkSessionManaged"))
+.map(Boolean::valueOf)
+.orElse(Boolean.TRUE);
+log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+final String graphPath = parser.get("graphPath");
+log.info("graphPath: {}", graphPath);
+
+final String eventsPath = parser.get("eventsPath");
+log.info("eventsPath: {}", eventsPath);
+
+final SparkConf conf = new SparkConf();
+runWithSparkSession(conf, isSparkSessionManaged, spark -> {
+removeOutputDir(spark, eventsPath);
+generateEvents(spark, graphPath, eventsPath);
+});
+
+}
+
+private static void removeOutputDir(final SparkSession spark, final String path) {
+HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
+}
+
+private static void generateEvents(final SparkSession spark, final String graphPath, final String eventsPath) {
+// TODO
+}
+
+private List<Event> generateEvents(final Result... children) {
+final List<Event> list = new ArrayList<>();
+
+for (final Result source : children) {
+for (final Result target : children) {
+if (source != target) {
+list
+.addAll(
+findUpdates(source, target)
+.stream()
+.map(info -> EventFactory.newBrokerEvent(source, target, info))
+.collect(Collectors.toList()));
+}
+}
+}
+
+return list;
+}
+
+private List<UpdateInfo<?>> findUpdates(final Result source, final Result target) {
+final List<UpdateInfo<?>> list = new ArrayList<>();
+list.addAll(EnrichMissingAbstract.findUpdates(source, target));
+list.addAll(EnrichMissingAuthorOrcid.findUpdates(source, target));
+list.addAll(EnrichMissingOpenAccess.findUpdates(source, target));
+list.addAll(EnrichMissingPid.findUpdates(source, target));
+list.addAll(EnrichMissingProject.findUpdates(source, target));
+list.addAll(EnrichMissingPublicationDate.findUpdates(source, target));
+list.addAll(EnrichMissingSubject.findUpdates(source, target));
+list.addAll(EnrichMoreOpenAccess.findUpdates(source, target));
+list.addAll(EnrichMorePid.findUpdates(source, target));
+list.addAll(EnrichMoreSubject.findUpdates(source, target));
+return list;
+}
+
+}
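Reviewer note: the parser loads /eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json, which looks copied from the graph-mapper module; a broker-specific parameters file would be expected here. Also, the pairwise generateEvents(Result...) helper is an instance method that is never reached from the static Spark flow, whose generateEvents(spark, graphPath, eventsPath) is still a TODO, so wiring it up (and making it static) is presumably follow-up work.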
@@ -0,0 +1,31 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingAbstract extends UpdateInfo<String> {
+
+public static List<EnrichMissingAbstract> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingAbstract(final String highlightValue, final float trust) {
+super("ENRICH/MISSING/ABSTRACT", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getAbstracts().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue();
+}
+
+}

@@ -0,0 +1,31 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingAuthorOrcid extends UpdateInfo<String> {
+
+public static List<EnrichMissingAuthorOrcid> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingAuthorOrcid(final String highlightValue, final float trust) {
+super("ENRICH/MISSING/AUTHOR/ORCID", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+// TODO
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue();
+}
+
+}

@@ -0,0 +1,32 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.Instance;
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingOpenAccess extends UpdateInfo<Instance> {
+
+public static List<EnrichMissingOpenAccess> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingOpenAccess(final Instance highlightValue, final float trust) {
+super("ENRICH/MISSING/OPENACCESS_VERSION", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getInstances().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue().getUrl();
+}
+
+}

@@ -0,0 +1,32 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingPid extends UpdateInfo<Pid> {
+
+public static List<EnrichMissingPid> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingPid(final Pid highlightValue, final float trust) {
+super("ENRICH/MISSING/PID", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getPids().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
+}
+
+}

@@ -0,0 +1,33 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.broker.objects.Project;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingProject extends UpdateInfo<Project> {
+
+public static List<EnrichMissingProject> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingProject(final Project highlightValue, final float trust) {
+super("ENRICH/MISSING/PROJECT", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getProjects().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram()
++ getHighlightValue().getCode();
+}
+
+}

@@ -0,0 +1,31 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingPublicationDate extends UpdateInfo<String> {
+
+public static List<EnrichMissingPublicationDate> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMissingPublicationDate(final String highlightValue, final float trust) {
+super("ENRICH/MISSING/PUBLICATION_DATE", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().setPublicationdate(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue();
+}
+
+}

@@ -0,0 +1,36 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMissingSubject extends UpdateInfo<String> {
+
+public static List<EnrichMissingSubject> findUpdates(final Result source, final Result target) {
+// MESHEUROPMC
+// ARXIV
+// JEL
+// DDC
+// ACM
+
+return Arrays.asList();
+}
+
+private EnrichMissingSubject(final String subjectClassification, final String highlightValue, final float trust) {
+super("ENRICH/MISSING/SUBJECT/" + subjectClassification, highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getSubjects().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue();
+}
+
+}

@@ -0,0 +1,32 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.Instance;
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMoreOpenAccess extends UpdateInfo<Instance> {
+
+public static List<EnrichMoreOpenAccess> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMoreOpenAccess(final Instance highlightValue, final float trust) {
+super("ENRICH/MORE/OPENACCESS_VERSION", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getInstances().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue().getUrl();
+}
+
+}

@@ -0,0 +1,32 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMorePid extends UpdateInfo<Pid> {
+
+public static List<EnrichMorePid> findUpdates(final Result source, final Result target) {
+// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
+return Arrays.asList();
+}
+
+private EnrichMorePid(final Pid highlightValue, final float trust) {
+super("ENRICH/MORE/PID", highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getPids().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
+}
+
+}

@@ -0,0 +1,36 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.dhp.schema.oaf.Result;
+
+public class EnrichMoreSubject extends UpdateInfo<String> {
+
+public static List<EnrichMoreSubject> findUpdates(final Result source, final Result target) {
+// MESHEUROPMC
+// ARXIV
+// JEL
+// DDC
+// ACM
+
+return Arrays.asList();
+}
+
+private EnrichMoreSubject(final String subjectClassification, final String highlightValue, final float trust) {
+super("ENRICH/MORE/SUBJECT/" + subjectClassification, highlightValue, trust);
+}
+
+@Override
+public void compileHighlight(final OpenAireEventPayload payload) {
+payload.getHighlight().getSubjects().add(getHighlightValue());
+}
+
+@Override
+public String getHighlightValueAsString() {
+return getHighlightValue();
+}
+
+}

@@ -0,0 +1,36 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
+
+public abstract class UpdateInfo<T> {
+
+private final String topic;
+
+private final T highlightValue;
+
+private final float trust;
+
+protected UpdateInfo(final String topic, final T highlightValue, final float trust) {
+this.topic = topic;
+this.highlightValue = highlightValue;
+this.trust = trust;
+}
+
+public T getHighlightValue() {
+return highlightValue;
+}
+
+public float getTrust() {
+return trust;
+}
+
+public String getTopic() {
+return topic;
+}
+
+abstract public void compileHighlight(OpenAireEventPayload payload);
+
+abstract public String getHighlightValueAsString();
+
+}
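Reviewer note: the ten Enrich* classes are scaffolding so far: every findUpdates returns an empty list with the real construction commented out (the copy-pasted EnrichMissingAbstract comment included), constructors are private so only the static factories can instantiate them, and each subclass pins its topic string. A hypothetical sketch of one filled-in factory (not in this PR; reuses the 0.9f trust from the commented-out code and the Result accessors seen in EventFactory):

    public static List<EnrichMissingPublicationDate> findUpdates(final Result source, final Result target) {
        // Emit an update only when the target lacks a date the source can provide.
        final boolean targetHasDate = target.getDateofacceptance() != null
            && StringUtils.isNotBlank(target.getDateofacceptance().getValue());
        final boolean sourceHasDate = source.getDateofacceptance() != null
            && StringUtils.isNotBlank(source.getDateofacceptance().getValue());
        if (!targetHasDate && sourceHasDate) {
            return Arrays.asList(
                new EnrichMissingPublicationDate(source.getDateofacceptance().getValue(), 0.9f));
        }
        return Arrays.asList();
    }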
@@ -5,7 +5,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
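Reviewer note: this pom (and one more near the end of the diff) bumps the parent to 1.1.8-SNAPSHOT while every other module moves to 1.2.0-SNAPSHOT. Unless the two modules are intentionally pinned, the stragglers will stop resolving their parent once only 1.2.0-SNAPSHOT is being built.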
@@ -25,6 +25,7 @@ import eu.dnetlib.dhp.schema.oaf.*;
 public class SparkBulkTagJob {

 private static final Logger log = LoggerFactory.getLogger(SparkBulkTagJob.class);
+public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 public static void main(String[] args) throws Exception {
 String jsonConfiguration = IOUtils

@@ -108,12 +109,12 @@ public class SparkBulkTagJob {
 .json(outputPath);
 }

-private static <R> Dataset<R> readPath(
-SparkSession spark, String inputEntityPath, Class<R> clazz) {
+public static <R> Dataset<R> readPath(
+SparkSession spark, String inputPath, Class<R> clazz) {
 return spark
 .read()
-.json(inputEntityPath)
-.as(Encoders.bean(clazz));
+.textFile(inputPath)
+.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
 }

 }
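Reviewer note: readPath is now public and reads line-delimited JSON via textFile plus Jackson instead of spark.read().json(); one likely motivation is that the bean encoder, not Spark's schema inference, then defines the schema. A usage sketch (hypothetical local harness and path; assumes SparkBulkTagJob and the schema classes are on the classpath):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.SparkSession;

    import eu.dnetlib.dhp.schema.oaf.Relation;

    public class ReadPathDemo {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local[*]").getOrCreate();
            // "/tmp/graph/relation" is an illustrative path to a line-delimited JSON dump.
            Dataset<Relation> rels = SparkBulkTagJob.readPath(spark, "/tmp/graph/relation", Relation.class);
            System.out.println(rels.count());
            spark.stop();
        }
    }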
@@ -1,5 +1,5 @@
 <workflow-app name="bulk_tagging" xmlns="uri:oozie:workflow:0.5">
-<parameters>
+<parameters>
 <property>
 <name>sourcePath</name>
 <description>the source path</description>

@@ -16,24 +16,18 @@
 <name>outputPath</name>
 <description>the output path</description>
 </property>
-</parameters>
+</parameters>

-<start to="reset-outputpath"/>
+<start to="reset_outputpath"/>

-<kill name="Kill">
+<kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-</kill>
+</kill>

-<action name="reset-outputpath">
+<action name="reset_outputpath">
 <fs>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/datasource'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>

@@ -67,6 +61,7 @@
 <ok to="copy_wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="copy_projects">
 <distcp xmlns="uri:oozie:distcp-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -91,14 +86,13 @@

 <join name="copy_wait" to="fork_exec_bulktag"/>

-
-
-<fork name="fork_exec_bulktag">
+<fork name="fork_exec_bulktag">
 <path start="join_bulktag_publication"/>
 <path start="join_bulktag_dataset"/>
 <path start="join_bulktag_otherresearchproduct"/>
 <path start="join_bulktag_software"/>
-</fork>
+</fork>

 <action name="join_bulktag_publication">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -127,6 +121,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_bulktag_dataset">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -155,6 +150,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_bulktag_otherresearchproduct">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -183,6 +179,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_bulktag_software">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -211,6 +208,9 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
-<join name="wait" to="End"/>
-<end name="End"/>
+
+<join name="wait" to="End"/>
+
+<end name="End"/>
+
 </workflow-app>
@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
 <artifactId>dhp-dedup-openaire</artifactId>
@@ -137,10 +137,14 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
 }

 private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) {
+
+String entityType = dedupConf.getWf().getEntityType();
+
 Relation r = new Relation();
 r.setSource(source);
 r.setTarget(target);
 r.setRelClass(relClass);
+r.setRelType(entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1));
 r.setSubRelType("dedup");

 DataInfo info = new DataInfo();
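Reviewer note: the relType is now derived from the dedup entity type by doubling it in camel case, matching relType values like resultResult used elsewhere. A tiny stand-alone check (class name invented):

    public class RelTypeDemo {
        public static void main(String[] args) {
            // e.g. dedupConf.getWf().getEntityType() returns "result"
            String entityType = "result";
            String relType = entityType
                + entityType.substring(0, 1).toUpperCase()
                + entityType.substring(1);
            System.out.println(relType); // resultResult
        }
    }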
@@ -86,7 +86,8 @@ public class SparkPropagateRelation extends AbstractSparkAction {
 mergedIds,
 FieldType.TARGET,
 getFixRelFn(FieldType.TARGET))
-.filter(SparkPropagateRelation::containsDedup);
+.filter(SparkPropagateRelation::containsDedup)
+.distinct();

 Dataset<Relation> updated = processDataset(
 processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()),
@@ -18,6 +18,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;

@@ -29,6 +30,8 @@ import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;

+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;

@@ -420,7 +423,7 @@ public class SparkDedupTest implements Serializable {

 long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count();

-assertEquals(5022, relations);
+assertEquals(4975, relations);

 // check deletedbyinference
 final Dataset<Relation> mergeRels = spark

@@ -450,6 +453,25 @@ public class SparkDedupTest implements Serializable {
 assertEquals(updated, deletedbyinference);
 }

+@Test
+@Order(6)
+public void testRelations() throws Exception {
+testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_1.json", 12, 10);
+testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_2.json", 10, 2);
+}
+
+private void testUniqueness(String path, int expected_total, int expected_unique) {
+Dataset<Relation> rel = spark
+.read()
+.textFile(getClass().getResource(path).getPath())
+.map(
+(MapFunction<String, Relation>) s -> new ObjectMapper().readValue(s, Relation.class),
+Encoders.bean(Relation.class));
+
+assertEquals(expected_total, rel.count());
+assertEquals(expected_unique, rel.distinct().count());
+}
+
 @AfterAll
 public static void finalCleanUp() throws IOException {
 FileUtils.deleteDirectory(new File(testOutputBasePath));
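Reviewer note: the drop in the expected relation count from 5022 to 4975 is consistent with the new .distinct() in SparkPropagateRelation removing 47 duplicated propagated relations, and the new testRelations/testUniqueness pair exercises exactly that dedup behaviour on the fixtures below.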
@@ -0,0 +1,12 @@
+{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::40c7b1dfa18c3693d374dafd21ef852f","subRelType":"provision","target":"10|doajarticles::618df40624078491acfd93ca3ff6921c"}
+{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::0b4e756a73338f60b84de98d080f6422","subRelType":"provision","target":"10|doajarticles::6d01e689db13b6977b411f4170b6143b"}
+{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::fe2f7c9d350b9c5aa658ec384d761e33","subRelType":"provision","target":"10|doajarticles::9b8a956b0703854ba79e52ddf7dc552e"}
+{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::a116734108ba011ef715b012f095e3f5","subRelType":"provision","target":"10|doajarticles::c5de04b1a35da2cc4468e299bc9ffa16"}
+{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::8b83abbbcad5496fe43cda88d0045aa4","subRelType":"provision","target":"10|opendoar____::6855456e2fe46a9d49d3d3af4f57443d"}
+{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::88034de0247d9d36e22783e9319c5ba3","subRelType":"provision","target":"10|opendoar____::c17028c9b6e0c5deaad29665d582284a"}
+{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::dfb21c796f33e9acf505cc960a3d8d2c","subRelType":"provision","target":"10|opendoar____::dfa037a53e121ecc9e0926800c3e814e"}
+{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::b526b1aa1562038881a31be59896985f","subRelType":"provision","target":"10|re3data_____::2e457773b62df3534cc04441bf406a70"}
+{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::6b306183bc051b5aaa5376f2fab6e6e5","subRelType":"provision","target":"10|re3data_____::6371ff9ee1ec7073416cb83c868b10a3"}
+{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
+{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
+{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}

@@ -0,0 +1,10 @@
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
+{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681629"}
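Reviewer note: the fixtures deliberately contain duplicates. relation_1.json holds 12 records of which the last three are byte-identical (10 distinct); relation_2.json holds 10 records, nine identical plus one whose target differs only in the final character (2 distinct). That matches the (12, 10) and (10, 2) expectations in testUniqueness.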
@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
@@ -5,10 +5,8 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.*;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+import java.util.stream.Collectors;

 import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;

@@ -115,12 +113,17 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 .setProcessingchargecurrency(
 field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));

-for (final Object o : doc.selectNodes("//dc:identifier")) {
-final String url = ((Node) o).getText().trim();
-if (url.startsWith("http")) {
-instance.setUrl(Arrays.asList(url));
-}
-}
+List<Node> nodes = Lists.newArrayList(doc.selectNodes("//dc:identifier"));
+instance
+.setUrl(
+nodes
+.stream()
+.filter(n -> StringUtils.isNotBlank(n.getText()))
+.map(n -> n.getText().trim())
+.filter(u -> u.startsWith("http"))
+.distinct()
+.collect(Collectors.toCollection(ArrayList::new)));

 return Lists.newArrayList(instance);
 }
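Reviewer note: the old loop overwrote instance.setUrl on every matching dc:identifier, keeping only the last http URL; the stream version collects all of them, blank-filtered and de-duplicated, in document order. A stand-alone sketch of the same pipeline on plain strings (inputs invented):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class UrlDedupDemo {
        public static void main(String[] args) {
            List<String> ids = Arrays.asList(
                "http://example.org/a", "http://example.org/a", "urn:isbn:123", "", "http://example.org/b");
            List<String> urls = ids
                .stream()
                .filter(s -> s != null && !s.trim().isEmpty()) // blank filter
                .map(String::trim)
                .filter(u -> u.startsWith("http"))             // URLs only
                .distinct()                                    // drop duplicates
                .collect(Collectors.toCollection(ArrayList::new));
            System.out.println(urls); // [http://example.org/a, http://example.org/b]
        }
    }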
@@ -6,10 +6,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
 import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;

@@ -80,6 +77,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 		final KeyValue hostedby) {
 
 		final Instance instance = new Instance();
+		final Set<String> url = new HashSet<>();
 		instance.setUrl(new ArrayList<>());
 		instance
 			.setInstancetype(

@@ -100,17 +98,18 @@
 				field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
 
 		for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) {
-			instance.getUrl().add(((Node) o).getText().trim());
+			url.add(((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) {
-			instance.getUrl().add(((Node) o).getText().trim());
+			url.add(((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
-			instance.getUrl().add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
+			url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) {
-			instance.getUrl().add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
+			url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
 		}
+		instance.getUrl().addAll(url);
 		return Arrays.asList(instance);
 	}

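The ODF mapper reaches the same goal differently: the four XPath loops now feed a Set, which is appended to the instance once, so a link matched by more than one query is kept only once. A minimal sketch of this accumulate-then-add pattern; the input lists and the DOI_PREFIX value are illustrative (the real constant is HTTP_DX_DOI_PREIFX):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class InstanceUrls {

	// Assumed value of the prefix constant used in the diff above.
	private static final String DOI_PREFIX = "http://dx.doi.org/";

	public static void main(String[] args) {
		// Stand-ins for the values returned by the XPath queries.
		List<String> urlIdentifiers = Arrays.asList("https://example.org/a", "https://example.org/a");
		List<String> doiIdentifiers = Arrays.asList("10.1000/182");

		// Accumulate into a Set so the same link contributed twice is kept once.
		Set<String> url = new HashSet<>();
		urlIdentifiers.forEach(s -> url.add(s.trim()));
		doiIdentifiers.forEach(s -> url.add(DOI_PREFIX + s.trim()));

		// One addAll at the end, as in the diff.
		List<String> instanceUrl = new ArrayList<>();
		instanceUrl.addAll(url);
		System.out.println(instanceUrl); // two entries; HashSet iteration order is unspecified
	}
}

One trade-off: HashSet discards insertion order; a LinkedHashSet would keep the URLs in the order the queries produced them.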
@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -405,6 +405,9 @@
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+--conf spark.speculation=false
+--conf spark.hadoop.mapreduce.map.speculative=false
+--conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
 <arg>--inputPath</arg><arg>${workingDir}/xml</arg>
 <arg>--isLookupUrl</arg> <arg>${isLookupUrl}</arg>

@@ -5,7 +5,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -27,14 +27,8 @@
 
 <action name="reset_outputpath">
 <fs>
-<delete path="${outputPath}/relation"/>
-<delete path="${outputPath}/dataset"/>
-<delete path="${outputPath}/software"/>
-<delete path="${outputPath}/publication"/>
-<delete path="${outputPath}/otherresearchproduct"/>
-<delete path="${outputPath}/project"/>
-<delete path="${outputPath}/organization"/>
-<delete path="${outputPath}/datasource"/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>

@@ -1,5 +1,5 @@
 <workflow-app name="orcid_to_result_from_semrel_propagation" xmlns="uri:oozie:workflow:0.5">
-<parameters>
+<parameters>
 <property>
 <name>sourcePath</name>
 <description>the source path</description>

@@ -12,26 +12,18 @@
 <name>outputPath</name>
 <description>the output path</description>
 </property>
-</parameters>
-<start to="reset-outputpath"/>
+</parameters>
+
+<start to="reset_outputpath"/>
 
 <!-- <start to="fork_prepare_assoc_step1"/>-->
 
-<kill name="Kill">
+<kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-</kill>
-<action name="reset-outputpath">
+</kill>
+
+<action name="reset_outputpath">
 <fs>
-<delete path='${workingDir}/preparedInfo'/>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/datasource'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>

@@ -119,11 +111,16 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+<arg>--outputPath</arg>
+<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg>
+<arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>

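The <arg> reshuffling in this and the following hunks is purely cosmetic: each <arg> element becomes exactly one entry of the String[] passed to the Spark action's main class, whether or not two elements share a line. A hand-rolled sketch of consuming such flag/value pairs (the project presumably has its own argument parser; this only illustrates the argv shape):

import java.util.HashMap;
import java.util.Map;

public class ArgPairs {

	// Reads args as alternating "--flag value" pairs, the shape produced
	// by the <arg> elements in the workflow definitions above.
	static Map<String, String> parse(String[] args) {
		Map<String, String> opts = new HashMap<>();
		for (int i = 0; i + 1 < args.length; i += 2) {
			if (!args[i].startsWith("--")) {
				throw new IllegalArgumentException("expected a --flag at position " + i + ": " + args[i]);
			}
			opts.put(args[i].substring(2), args[i + 1]);
		}
		return opts;
	}

	public static void main(String[] args) {
		String[] demo = { "--sourcePath", "/tmp/graph", "--resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication" };
		System.out.println(parse(demo).get("resultTableName")); // eu.dnetlib.dhp.schema.oaf.Publication
	}
}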
@@ -147,11 +144,16 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+<arg>--outputPath</arg>
+<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg>
+<arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>

@@ -175,11 +177,16 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+<arg>--outputPath</arg>
+<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg>
+<arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>

@@ -203,11 +210,16 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+<arg>--outputPath</arg>
+<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg>
+<arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>

@@ -233,11 +245,13 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg>
+<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--outputPath</arg>
+<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
-<!-- <ok to="End"/>-->
+<!-- <ok to="End"/>-->
 <error to="Kill"/>
 
 </action>

@@ -270,12 +284,18 @@
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 --conf spark.sql.shuffle.partitions=3840
 </spark-opts>
-<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-<arg>--outputPath</arg><arg>${outputPath}/publication</arg>
-<arg>--saveGraph</arg><arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg>
+<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}/publication</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+<arg>--outputPath</arg>
+<arg>${outputPath}/publication</arg>
+<arg>--saveGraph</arg>
+<arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>

@@ -301,12 +321,18 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-<arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
-<arg>--saveGraph</arg><arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg>
+<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}/dataset</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+<arg>--outputPath</arg>
+<arg>${outputPath}/dataset</arg>
+<arg>--saveGraph</arg>
+<arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>

@@ -332,12 +358,18 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-<arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
-<arg>--saveGraph</arg><arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg>
+<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}/otherresearchproduct</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+<arg>--outputPath</arg>
+<arg>${outputPath}/otherresearchproduct</arg>
+<arg>--saveGraph</arg>
+<arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>

@@ -363,16 +395,22 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-<arg>--outputPath</arg><arg>${outputPath}/software</arg>
-<arg>--saveGraph</arg><arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg>
+<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg>
+<arg>${sourcePath}/software</arg>
+<arg>--hive_metastore_uris</arg>
+<arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg>
+<arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+<arg>--outputPath</arg>
+<arg>${outputPath}/software</arg>
+<arg>--saveGraph</arg>
+<arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
 <join name="wait2" to="End"/>
-<end name="End"/>
+<end name="End"/>
 </workflow-app>

@@ -14,27 +14,21 @@
 </property>
 </parameters>
 
-<start to="reset-outputpath"/>
-<!-- <start to="apply_propagation"/>-->
+<start to="reset_outputpath"/>
 
 <kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 </kill>
 
-<action name="reset-outputpath">
+<action name="reset_outputpath">
 <fs>
-<delete path='${workingDir}/preparedInfo'/>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/datasource'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>
 </action>
 
 <fork name="copy_entities">
 <path start="copy_relation"/>
 <path start="copy_publication"/>

@@ -190,4 +184,5 @@
 </action>
 
 <end name="End"/>
+
 </workflow-app>

@@ -14,23 +14,16 @@
 </property>
 </parameters>
 
-<start to="reset-outputpath"/>
+<start to="reset_outputpath"/>
 
 <kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 </kill>
 
-<action name="reset-outputpath">
+<action name="reset_outputpath">
 <fs>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/datasource'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${workingDir}/preparedInfo'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>

@@ -64,6 +57,7 @@
 <ok to="copy_wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="copy_projects">
 <distcp xmlns="uri:oozie:distcp-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -85,6 +79,7 @@
 <ok to="copy_wait"/>
 <error to="Kill"/>
 </action>
+
 <join name="copy_wait" to="prepare_result_communitylist"/>
 
 <action name="prepare_result_communitylist">

@@ -111,7 +106,6 @@
 <arg>--organizationtoresultcommunitymap</arg><arg>${organizationtoresultcommunitymap}</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
-<!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>
 

@@ -150,6 +144,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_dataset">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -178,6 +173,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_otherresearchproduct">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -206,6 +202,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_software">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -238,4 +235,5 @@
 <join name="wait2" to="End"/>
+
 <end name="End"/>
 
 </workflow-app>

@@ -1,5 +1,5 @@
 <workflow-app name="result_to_community_from_semrel_propagation" xmlns="uri:oozie:workflow:0.5">
-<parameters>
+<parameters>
 <property>
 <name>sourcePath</name>
 <description>the source path</description>

@@ -16,28 +16,23 @@
 <name>outputPath</name>
 <description>the output path</description>
 </property>
-</parameters>
+</parameters>
 
-<start to="reset-outputpath"/>
+<start to="reset_outputpath"/>
 
 <kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 </kill>
-<action name="reset-outputpath">
+
+<action name="reset_outputpath">
 <fs>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/datasource'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${workingDir}/preparedInfo'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>
 </action>
 
 <fork name="copy_entities">
 <path start="copy_relation"/>
 <path start="copy_organization"/>

@@ -126,6 +121,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_prepare_dataset">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -182,6 +178,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_prepare_software">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -235,7 +232,7 @@
 <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedCommunityAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
-<!-- <ok to="End"/>-->
+<!-- <ok to="End"/>-->
 <error to="Kill"/>
 
 </action>

@@ -275,6 +272,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_dataset">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -303,6 +301,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_otherresearchproduct">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -331,6 +330,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagate_software">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -362,5 +362,6 @@
 
 <join name="wait2" to="End"/>
 
-<end name="End"/>
+<end name="End"/>
+
 </workflow-app>

@@ -1,5 +1,5 @@
 <workflow-app name="affiliation_from_instrepo_propagation" xmlns="uri:oozie:workflow:0.5">
-<parameters>
+<parameters>
 <property>
 <name>sourcePath</name>
 <description>the source path</description>

@@ -8,25 +8,18 @@
 <name>outputPath</name>
 <description>sets the outputPath</description>
 </property>
-</parameters>
+</parameters>
 
-<start to="reset-outputpath"/>
-<!--<start to="prepare_result_organization_association"/>-->
-<kill name="Kill">
+<start to="reset_outputpath"/>
+
+<kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-</kill>
+</kill>
 
-<action name="reset-outputpath">
+<action name="reset_outputpath">
 <fs>
-<delete path='${outputPath}/relation'/>
-<delete path='${outputPath}/datasource'/>
-<delete path='${outputPath}/organization'/>
-<delete path='${outputPath}/project'/>
-<delete path='${outputPath}/publication'/>
-<delete path='${outputPath}/dataset'/>
-<delete path='${outputPath}/software'/>
-<delete path='${outputPath}/otherresearchproduct'/>
-<delete path='${workingDir}/preparedInfo'/>
+<delete path="${outputPath}"/>
+<mkdir path="${outputPath}"/>
 </fs>
 <ok to="copy_entities"/>
 <error to="Kill"/>

@@ -53,6 +46,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="copy_publication">
 <distcp xmlns="uri:oozie:distcp-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -85,6 +79,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="copy_software">
 <distcp xmlns="uri:oozie:distcp-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -95,6 +90,7 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="copy_organization">
 <distcp xmlns="uri:oozie:distcp-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>

@@ -151,9 +147,9 @@
 <arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
 </spark>
 <ok to="fork_join_apply_resulttoorganization_propagation"/>
-<!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>
+
 <fork name="fork_join_apply_resulttoorganization_propagation">
 <path start="join_propagation_publication"/>
 <path start="join_propagation_dataset"/>

@@ -220,6 +216,7 @@
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_propagation_otherresearchproduct">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>

@@ -281,6 +278,7 @@
 </action>
 
 <join name="wait2" to="End"/>
 
 <end name="End"/>
+
 </workflow-app>

@@ -3,7 +3,7 @@
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
 <artifactId>dhp-stats-update</artifactId>

@@ -1,2 +1,2 @@
 DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages;
-CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from openaire.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;
+CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from ${openaire_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;

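The fix above replaces the hard-coded openaire database with the ${openaire_db_name} placeholder, so the statement resolves against whichever database the workflow supplies, as ${stats_db_name} already did. Substitution is normally Hive's job (e.g. hive --hivevar name=value), but a minimal Java sketch of the same replacement, with illustrative database names, makes the mechanism concrete:

import java.util.HashMap;
import java.util.Map;

public class SqlTemplate {

	// Replaces ${name} placeholders in a Hive script with supplied values,
	// mimicking what hivevar substitution does at execution time.
	static String substitute(String sql, Map<String, String> vars) {
		String out = sql;
		for (Map.Entry<String, String> e : vars.entrySet()) {
			out = out.replace("${" + e.getKey() + "}", e.getValue());
		}
		return out;
	}

	public static void main(String[] args) {
		String sql = "CREATE TABLE ${stats_db_name}.datasource_languages AS "
			+ "SELECT substr(d.id, 4) as id FROM ${openaire_db_name}.datasource d";
		Map<String, String> vars = new HashMap<>();
		vars.put("stats_db_name", "stats_2020");       // illustrative value
		vars.put("openaire_db_name", "openaire_prod"); // illustrative value
		System.out.println(substitute(sql, vars));
	}
}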
@@ -1,11 +1,9 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <parent>
 <artifactId>dhp-workflows</artifactId>
 <groupId>eu.dnetlib.dhp</groupId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.2.0-SNAPSHOT</version>
 <relativePath>../</relativePath>
 </parent>