merged from branch

Sandro La Bruzzo 2020-05-11 09:45:50 +02:00
commit 1412158a6f
71 changed files with 3421 additions and 1279 deletions


@@ -6,7 +6,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-build</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-build-assembly-resources</artifactId>


@@ -6,7 +6,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-build</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-build-properties-maven-plugin</artifactId>


@@ -1,13 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-code-style</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 <packaging>jar</packaging>


@@ -4,7 +4,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-build</artifactId>
 <packaging>pom</packaging>


@@ -5,7 +5,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 <relativePath>../</relativePath>
 </parent>


@@ -5,7 +5,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 <relativePath>../</relativePath>
 </parent>


@@ -6,36 +6,86 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
 public class ModelConstants {
 public static final String DNET_RESULT_TYPOLOGIES = "dnet:result_typologies";
+public static final String DNET_PUBLICATION_RESOURCE = "dnet:publication_resource";
+public static final String DNET_ACCESS_MODES = "dnet:access_modes";
+public static final String DNET_LANGUAGES = "dnet:languages";
+public static final String DNET_PID_TYPES = "dnet:pid_types";
+public static final String DNET_DATA_CITE_DATE = "dnet:dataCite_date";
+public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource";
+public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";
+public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
+public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry";
+public static final String USER_CLAIM = "user:claim";
 public static final String DATASET_RESULTTYPE_CLASSID = "dataset";
 public static final String PUBLICATION_RESULTTYPE_CLASSID = "publication";
 public static final String SOFTWARE_RESULTTYPE_CLASSID = "software";
 public static final String ORP_RESULTTYPE_CLASSID = "other";
-public static Qualifier PUBLICATION_DEFAULT_RESULTTYPE = new Qualifier();
-public static Qualifier DATASET_DEFAULT_RESULTTYPE = new Qualifier();
-public static Qualifier SOFTWARE_DEFAULT_RESULTTYPE = new Qualifier();
-public static Qualifier ORP_DEFAULT_RESULTTYPE = new Qualifier();
-static {
-PUBLICATION_DEFAULT_RESULTTYPE.setClassid(PUBLICATION_RESULTTYPE_CLASSID);
-PUBLICATION_DEFAULT_RESULTTYPE.setClassname(PUBLICATION_RESULTTYPE_CLASSID);
-PUBLICATION_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-PUBLICATION_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-DATASET_DEFAULT_RESULTTYPE.setClassid(DATASET_RESULTTYPE_CLASSID);
-DATASET_DEFAULT_RESULTTYPE.setClassname(DATASET_RESULTTYPE_CLASSID);
-DATASET_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-DATASET_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-SOFTWARE_DEFAULT_RESULTTYPE.setClassid(SOFTWARE_RESULTTYPE_CLASSID);
-SOFTWARE_DEFAULT_RESULTTYPE.setClassname(SOFTWARE_RESULTTYPE_CLASSID);
-SOFTWARE_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-SOFTWARE_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-ORP_DEFAULT_RESULTTYPE.setClassid(ORP_RESULTTYPE_CLASSID);
-ORP_DEFAULT_RESULTTYPE.setClassname(ORP_RESULTTYPE_CLASSID);
-ORP_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-ORP_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-}
+public static final String RESULT_RESULT = "resultResult";
+public static final String PUBLICATION_DATASET = "publicationDataset";
+public static final String IS_RELATED_TO = "isRelatedTo";
+public static final String SUPPLEMENT = "supplement";
+public static final String IS_SUPPLEMENT_TO = "isSupplementTo";
+public static final String IS_SUPPLEMENTED_BY = "isSupplementedBy";
+public static final String PART = "part";
+public static final String IS_PART_OF = "IsPartOf";
+public static final String HAS_PARTS = "HasParts";
+public static final String RELATIONSHIP = "relationship";
+public static final String RESULT_PROJECT = "resultProject";
+public static final String OUTCOME = "outcome";
+public static final String IS_PRODUCED_BY = "isProducedBy";
+public static final String PRODUCES = "produces";
+public static final String DATASOURCE_ORGANIZATION = "datasourceOrganization";
+public static final String PROVISION = "provision";
+public static final String IS_PROVIDED_BY = "isProvidedBy";
+public static final String PROVIDES = "provides";
+public static final String PROJECT_ORGANIZATION = "projectOrganization";
+public static final String PARTICIPATION = "participation";
+public static final String HAS_PARTICIPANT = "hasParticipant";
+public static final String IS_PARTICIPANT = "isParticipant";
+public static final String UNKNOWN = "UNKNOWN";
+public static final String NOT_AVAILABLE = "not available";
+public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
+	PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
+	DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+public static final Qualifier DATASET_DEFAULT_RESULTTYPE = qualifier(
+	DATASET_RESULTTYPE_CLASSID, DATASET_RESULTTYPE_CLASSID,
+	DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+public static final Qualifier SOFTWARE_DEFAULT_RESULTTYPE = qualifier(
+	SOFTWARE_RESULTTYPE_CLASSID, SOFTWARE_RESULTTYPE_CLASSID,
+	DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+public static final Qualifier ORP_DEFAULT_RESULTTYPE = qualifier(
+	ORP_RESULTTYPE_CLASSID, ORP_RESULTTYPE_CLASSID,
+	DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
+	SYSIMPORT_CROSSWALK_REPOSITORY, SYSIMPORT_CROSSWALK_REPOSITORY,
+	DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
+	SYSIMPORT_CROSSWALK_ENTITYREGISTRY, SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
+	DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+private static Qualifier qualifier(
+	final String classid,
+	final String classname,
+	final String schemeid,
+	final String schemename) {
+	final Qualifier q = new Qualifier();
+	q.setClassid(classid);
+	q.setClassname(classname);
+	q.setSchemeid(schemeid);
+	q.setSchemename(schemename);
+	return q;
+}
 }
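Note on the hunk above: the mutable Qualifier constants that used to be populated in a static block are now final and built through the private qualifier(...) helper, so they are fully initialised at class-loading time. A minimal sketch of what downstream code can rely on, assuming the usual bean getters on Qualifier (only the setters appear in this hunk):

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Qualifier;

public class ModelConstantsSketch {
	public static void main(String[] args) {
		// No static-initializer ordering to worry about: the constant is already populated.
		Qualifier q = ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE;
		System.out.println(q.getClassid());  // expected: "publication"
		System.out.println(q.getSchemeid()); // expected: "dnet:result_typologies"
	}
}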


@@ -7,6 +7,9 @@ import java.util.Objects;
 public abstract class Oaf implements Serializable {
+/**
+ * The list of datasource id/name pairs providing this relationship.
+ */
 protected List<KeyValue> collectedfrom;
 private DataInfo dataInfo;


@@ -7,16 +7,38 @@ import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+/**
+ * Relation models any edge between two nodes in the OpenAIRE graph. It has a source id and a target id pointing to
+ * graph node identifiers and it is further characterised by the semantic of the link through the fields relType,
+ * subRelType and relClass. Provenance information is modeled according to the dataInfo element and collectedFrom, while
+ * individual relationship types can provide extra information via the properties field.
+ */
 public class Relation extends Oaf {
+/**
+ * Main relationship classifier, values include 'resultResult', 'resultProject', 'resultOrganization', etc.
+ */
 private String relType;
+/**
+ * Further classifies a relationship, values include 'affiliation', 'similarity', 'supplement', etc.
+ */
 private String subRelType;
+/**
+ * Indicates the direction of the relationship, values include 'isSupplementTo', 'isSupplementedBy', 'merges,
+ * 'isMergedIn'.
+ */
 private String relClass;
+/**
+ * The source entity id.
+ */
 private String source;
+/**
+ * The target entity id.
+ */
 private String target;
 public String getRelType() {
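To make the documented fields concrete, here is a small illustrative example of building a result-to-project relation with the constants introduced in ModelConstants above. It assumes the usual bean setters on Relation (this hunk only shows the getter side) and uses made-up identifiers:

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Relation;

public class RelationSketch {
	public static void main(String[] args) {
		Relation r = new Relation();
		r.setRelType(ModelConstants.RESULT_PROJECT);  // main classifier
		r.setSubRelType(ModelConstants.OUTCOME);      // finer classification
		r.setRelClass(ModelConstants.IS_PRODUCED_BY); // direction of the edge
		r.setSource("50|example_______::hypothetical-result-id");  // hypothetical source id
		r.setTarget("40|example_______::hypothetical-project-id"); // hypothetical target id
		System.out.println(r.getRelType() + "/" + r.getSubRelType() + "/" + r.getRelClass());
	}
}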


@@ -1,11 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-workflows</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-actionmanager</artifactId>


@@ -84,8 +84,11 @@ public class MigrateActionSet {
 final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
 log
 .info(
-"paths to process:\n{}",
-sourcePaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
+"paths to process:\n{}", sourcePaths
+.stream()
+.map(p -> p.toString())
+.collect(Collectors.joining("\n")));
 for (Path source : sourcePaths) {
 if (!sourceFS.exists(source)) {
@@ -119,9 +122,8 @@ public class MigrateActionSet {
 }
 }
-props
-.setProperty(
-TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
+final String targetPathsCsv = targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(","));
+props.setProperty(TARGET_PATHS, targetPathsCsv);
 File file = new File(System.getProperty("oozie.action.output.properties"));
 try (OutputStream os = new FileOutputStream(file)) {


@@ -1,12 +1,10 @@
 package eu.dnetlib.dhp.actionmanager.migration;
-import static eu.dnetlib.data.proto.KindProtos.Kind.entity;
-import static eu.dnetlib.data.proto.KindProtos.Kind.relation;
-import static eu.dnetlib.data.proto.TypeProtos.*;
-import static eu.dnetlib.data.proto.TypeProtos.Type.*;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
@@ -21,10 +19,6 @@ import eu.dnetlib.dhp.schema.oaf.*;
 public class ProtoConverter implements Serializable {
-public static final String UNKNOWN = "UNKNOWN";
-public static final String NOT_AVAILABLE = "not available";
-public static final String DNET_ACCESS_MODES = "dnet:access_modes";
 public static Oaf convert(OafProtos.Oaf oaf) {
 try {
 switch (oaf.getKind()) {
@@ -64,6 +58,7 @@ public class ProtoConverter implements Serializable {
 case result:
 final Result r = convertResult(oaf);
 r.setInstance(convertInstances(oaf));
+r.setExternalReference(convertExternalRefs(oaf));
 return r;
 case project:
 return convertProject(oaf);
@@ -94,13 +89,44 @@ public class ProtoConverter implements Serializable {
 i.setHostedby(mapKV(ri.getHostedby()));
 i.setInstancetype(mapQualifier(ri.getInstancetype()));
 i.setLicense(mapStringField(ri.getLicense()));
-i.setUrl(ri.getUrlList());
+i
+.setUrl(
+ri.getUrlList() != null ? ri
+.getUrlList()
+.stream()
+.distinct()
+.collect(Collectors.toCollection(ArrayList::new)) : null);
 i.setRefereed(mapStringField(ri.getRefereed()));
 i.setProcessingchargeamount(mapStringField(ri.getProcessingchargeamount()));
 i.setProcessingchargecurrency(mapStringField(ri.getProcessingchargecurrency()));
 return i;
 }
+private static List<ExternalReference> convertExternalRefs(OafProtos.Oaf oaf) {
+ResultProtos.Result r = oaf.getEntity().getResult();
+if (r.getExternalReferenceCount() > 0) {
+return r
+.getExternalReferenceList()
+.stream()
+.map(e -> convertExtRef(e))
+.collect(Collectors.toList());
+}
+return Lists.newArrayList();
+}
+private static ExternalReference convertExtRef(ResultProtos.Result.ExternalReference e) {
+ExternalReference ex = new ExternalReference();
+ex.setUrl(e.getUrl());
+ex.setSitename(e.getSitename());
+ex.setRefidentifier(e.getRefidentifier());
+ex.setQuery(e.getQuery());
+ex.setQualifier(mapQualifier(e.getQualifier()));
+ex.setLabel(e.getLabel());
+ex.setDescription(e.getDescription());
+ex.setDataInfo(ex.getDataInfo());
+return ex;
+}
 private static Organization convertOrganization(OafProtos.Oaf oaf) {
 final OrganizationProtos.Organization.Metadata m = oaf.getEntity().getOrganization().getMetadata();
 final Organization org = setOaf(new Organization(), oaf);
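The url mapping added above deduplicates the protobuf url list while staying null-safe and collecting into a mutable, serializable ArrayList. A standalone sketch of the same idiom, in plain Java and independent of the protobuf classes, with illustrative values only:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class DistinctUrlSketch {
	// Copy-and-deduplicate: returns null for null input, otherwise a new ArrayList without duplicates.
	static List<String> distinctCopy(List<String> urls) {
		return urls != null
			? urls.stream().distinct().collect(Collectors.toCollection(ArrayList::new))
			: null;
	}

	public static void main(String[] args) {
		// prints [http://a, http://b]
		System.out.println(distinctCopy(Arrays.asList("http://a", "http://a", "http://b")));
	}
}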


@@ -4,7 +4,7 @@
 <parent>
 <groupId>eu.dnetlib.dhp</groupId>
 <artifactId>dhp-workflows</artifactId>
-<version>1.1.7-SNAPSHOT</version>
+<version>1.1.8-SNAPSHOT</version>
 </parent>
 <artifactId>dhp-aggregation</artifactId>


@@ -1,17 +1,21 @@
 package eu.dnetlib.dhp.collection;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 import java.io.ByteArrayInputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -23,6 +27,8 @@ import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -35,6 +41,8 @@ import eu.dnetlib.message.MessageType;
 public class GenerateNativeStoreSparkJob {
+private static final Logger log = LoggerFactory.getLogger(GenerateNativeStoreSparkJob.class);
 public static MetadataRecord parseRecord(
 final String input,
 final String xpath,
@@ -78,84 +86,90 @@ public class GenerateNativeStoreSparkJob {
 final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
 final long dateOfCollection = new Long(parser.get("dateOfCollection"));
-final SparkSession spark = SparkSession
-.builder()
-.appName("GenerateNativeStoreSparkJob")
-.master(parser.get("master"))
-.getOrCreate();
+Boolean isSparkSessionManaged = Optional
+.ofNullable(parser.get("isSparkSessionManaged"))
+.map(Boolean::valueOf)
+.orElse(Boolean.TRUE);
+log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 final Map<String, String> ongoingMap = new HashMap<>();
 final Map<String, String> reportMap = new HashMap<>();
 final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
-final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+SparkConf conf = new SparkConf();
+runWithSparkSession(
+conf,
+isSparkSessionManaged,
+spark -> {
+final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 final JavaPairRDD<IntWritable, Text> inputRDD = sc
 .sequenceFile(parser.get("input"), IntWritable.class, Text.class);
 final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems");
 final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords");
 final MessageManager manager = new MessageManager(
 parser.get("rabbitHost"),
 parser.get("rabbitUser"),
 parser.get("rabbitPassword"),
 false,
 false,
 null);
 final JavaRDD<MetadataRecord> mappeRDD = inputRDD
 .map(
 item -> parseRecord(
 item._2().toString(),
 parser.get("xpath"),
 parser.get("encoding"),
 provenance,
 dateOfCollection,
 totalItems,
 invalidRecords))
 .filter(Objects::nonNull)
 .distinct();
 ongoingMap.put("ongoing", "0");
 if (!test) {
 manager
 .sendMessage(
 new Message(
 parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
 parser.get("rabbitOngoingQueue"),
 true,
 false);
 }
 final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
 final Dataset<MetadataRecord> mdstore = spark.createDataset(mappeRDD.rdd(), encoder);
 final LongAccumulator mdStoreRecords = sc.sc().longAccumulator("MDStoreRecords");
 mdStoreRecords.add(mdstore.count());
 ongoingMap.put("ongoing", "" + totalItems.value());
 if (!test) {
 manager
 .sendMessage(
 new Message(
 parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
 parser.get("rabbitOngoingQueue"),
 true,
 false);
 }
 mdstore.write().format("parquet").save(parser.get("output"));
 reportMap.put("inputItem", "" + totalItems.value());
 reportMap.put("invalidRecords", "" + invalidRecords.value());
 reportMap.put("mdStoreSize", "" + mdStoreRecords.value());
 if (!test) {
 manager
 .sendMessage(
 new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
 parser.get("rabbitReportQueue"),
 true,
 false);
 manager.close();
 }
+});
 }
 }
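Both refactored jobs in this commit delegate session handling to SparkSessionSupport.runWithSparkSession, which is not included in the hunks shown here. The following is a minimal sketch of what such a helper presumably looks like, inferred from how it is called above and from the "when true will stop SparkSession after job execution" parameter description; it is an assumption, not the actual dhp-common implementation (which may use a checked-exception-friendly functional interface instead of java.util.function.Consumer):

import java.util.function.Consumer;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkSessionSupportSketch {
	// Runs the job body with a session derived from the given conf and stops the session
	// afterwards only when its lifecycle is managed by the job itself.
	public static void runWithSparkSession(
		SparkConf conf, Boolean isSparkSessionManaged, Consumer<SparkSession> fn) {
		SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
		try {
			fn.accept(spark);
		} finally {
			if (Boolean.TRUE.equals(isSparkSessionManaged)) {
				spark.stop();
			}
		}
	}
}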


@@ -1,13 +1,17 @@
 package eu.dnetlib.dhp.transformation;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 import java.io.ByteArrayInputStream;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
@@ -17,8 +21,11 @@ import org.dom4j.Document;
 import org.dom4j.DocumentException;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.collection.GenerateNativeStoreSparkJob;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;
 import eu.dnetlib.dhp.transformation.vocabulary.VocabularyHelper;
@@ -29,6 +36,8 @@ import eu.dnetlib.message.MessageType;
 public class TransformSparkJobNode {
+private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class);
 public static void main(String[] args) throws Exception {
 final ArgumentApplicationParser parser = new ArgumentApplicationParser(
@@ -40,12 +49,18 @@ public class TransformSparkJobNode {
 parser.parseArgument(args);
+Boolean isSparkSessionManaged = Optional
+.ofNullable(parser.get("isSparkSessionManaged"))
+.map(Boolean::valueOf)
+.orElse(Boolean.TRUE);
+log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 final String inputPath = parser.get("input");
 final String outputPath = parser.get("output");
 final String workflowId = parser.get("workflowId");
 final String trasformationRule = extractXSLTFromTR(
 Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule"))));
-final String master = parser.get("master");
 final String rabbitUser = parser.get("rabbitUser");
 final String rabbitPassword = parser.get("rabbitPassword");
 final String rabbitHost = parser.get("rabbitHost");
@@ -53,46 +68,48 @@ public class TransformSparkJobNode {
 final long dateOfCollection = new Long(parser.get("dateOfCollection"));
 final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
-final SparkSession spark = SparkSession
-.builder()
-.appName("TransformStoreSparkJob")
-.master(master)
-.getOrCreate();
+SparkConf conf = new SparkConf();
+runWithSparkSession(
+conf,
+isSparkSessionManaged,
+spark -> {
 final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
 final Dataset<MetadataRecord> mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder);
 final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems");
 final LongAccumulator errorItems = spark.sparkContext().longAccumulator("errorItems");
 final LongAccumulator transformedItems = spark.sparkContext().longAccumulator("transformedItems");
 final Map<String, Vocabulary> vocabularies = new HashMap<>();
 vocabularies.put("dnet:languages", VocabularyHelper.getVocabularyFromAPI("dnet:languages"));
 final TransformFunction transformFunction = new TransformFunction(
 totalItems,
 errorItems,
 transformedItems,
 trasformationRule,
 dateOfCollection,
 vocabularies);
 mdstoreInput.map(transformFunction, encoder).write().format("parquet").save(outputPath);
 if (rabbitHost != null) {
 System.out.println("SEND FINAL REPORT");
 final Map<String, String> reportMap = new HashMap<>();
 reportMap.put("inputItem", "" + totalItems.value());
 reportMap.put("invalidRecords", "" + errorItems.value());
 reportMap.put("mdStoreSize", "" + transformedItems.value());
 System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
 if (!test) {
-final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false,
-null);
+final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false,
+false,
+null);
 manager
 .sendMessage(
 new Message(workflowId, "Transform", MessageType.REPORT, reportMap),
 rabbitReportQueue,
 true,
 false);
 manager.close();
 }
 }
+});
 }
 private static String extractXSLTFromTR(final String tr) throws DocumentException {
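TransformFunction itself is not part of this commit view. Given that it is passed to Dataset.map together with a bean Encoder, it is presumably a Spark MapFunction over MetadataRecord; the constructor arguments are taken from the call above, and the body here is only a placeholder for the real XSLT application. An assumed shape, not the actual class:

import java.util.Map;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.util.LongAccumulator;

import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;

// Assumed shape only: the real class applies the transformation rule to each record.
public class TransformFunctionSketch implements MapFunction<MetadataRecord, MetadataRecord> {

	private final LongAccumulator totalItems;
	private final LongAccumulator errorItems;
	private final LongAccumulator transformedItems;
	private final String transformationRule;
	private final long dateOfCollection;
	private final Map<String, Vocabulary> vocabularies;

	public TransformFunctionSketch(LongAccumulator totalItems, LongAccumulator errorItems,
		LongAccumulator transformedItems, String transformationRule, long dateOfCollection,
		Map<String, Vocabulary> vocabularies) {
		this.totalItems = totalItems;
		this.errorItems = errorItems;
		this.transformedItems = transformedItems;
		this.transformationRule = transformationRule;
		this.dateOfCollection = dateOfCollection;
		this.vocabularies = vocabularies;
	}

	@Override
	public MetadataRecord call(MetadataRecord value) {
		totalItems.add(1);
		// The real implementation would transform the record body here and
		// increment errorItems on failure instead of transformedItems.
		transformedItems.add(1);
		return value;
	}
}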


@@ -1,16 +1,86 @@
[
-{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-{"paramName":"e", "paramLongName":"encoding", "paramDescription": "the encoding of the input record should be JSON or XML", "paramRequired": true},
-{"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true},
-{"paramName":"p", "paramLongName":"provenance", "paramDescription": "the infos about the provenance of the collected records", "paramRequired": true},
-{"paramName":"x", "paramLongName":"xpath", "paramDescription": "the xpath to identify the record ifentifier", "paramRequired": true},
-{"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true},
-{"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true},
-{"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true},
-{"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true},
-{"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true},
-{"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true},
-{"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false}
{
"paramName": "issm",
"paramLongName": "isSparkSessionManaged",
"paramDescription": "when true will stop SparkSession after job execution",
"paramRequired": false
},
{
"paramName": "e",
"paramLongName": "encoding",
"paramDescription": "the encoding of the input record should be JSON or XML",
"paramRequired": true
},
{
"paramName": "d",
"paramLongName": "dateOfCollection",
"paramDescription": "the date when the record has been stored",
"paramRequired": true
},
{
"paramName": "p",
"paramLongName": "provenance",
"paramDescription": "the infos about the provenance of the collected records",
"paramRequired": true
},
{
"paramName": "x",
"paramLongName": "xpath",
"paramDescription": "the xpath to identify the record identifier",
"paramRequired": true
},
{
"paramName": "i",
"paramLongName": "input",
"paramDescription": "the path of the sequencial file to read",
"paramRequired": true
},
{
"paramName": "o",
"paramLongName": "output",
"paramDescription": "the path of the result DataFrame on HDFS",
"paramRequired": true
},
{
"paramName": "ru",
"paramLongName": "rabbitUser",
"paramDescription": "the user to connect with RabbitMq for messaging",
"paramRequired": true
},
{
"paramName": "rp",
"paramLongName": "rabbitPassword",
"paramDescription": "the password to connect with RabbitMq for messaging",
"paramRequired": true
},
{
"paramName": "rh",
"paramLongName": "rabbitHost",
"paramDescription": "the host of the RabbitMq server",
"paramRequired": true
},
{
"paramName": "ro",
"paramLongName": "rabbitOngoingQueue",
"paramDescription": "the name of the ongoing queue",
"paramRequired": true
},
{
"paramName": "rr",
"paramLongName": "rabbitReportQueue",
"paramDescription": "the name of the report queue",
"paramRequired": true
},
{
"paramName": "w",
"paramLongName": "workflowId",
"paramDescription": "the identifier of the dnet Workflow",
"paramRequired": true
},
{
"paramName": "t",
"paramLongName": "isTest",
"paramDescription": "the name of the report queue",
"paramRequired": false
}
] ]


@@ -1,16 +1,74 @@
[
-{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-{"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true},
-{"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true},
-{"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true},
-{"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true},
-{"paramName":"tr", "paramLongName":"transformationRule","paramDescription": "the transformation Rule to apply to the input MDStore", "paramRequired": true},
-{"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true},
-{"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true},
-{"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true},
-{"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false}
{
"paramName": "issm",
"paramLongName": "isSparkSessionManaged",
"paramDescription": "when true will stop SparkSession after job execution",
"paramRequired": false
},
{
"paramName": "d",
"paramLongName": "dateOfCollection",
"paramDescription": "the date when the record has been stored",
"paramRequired": true
},
{
"paramName": "i",
"paramLongName": "input",
"paramDescription": "the path of the sequencial file to read",
"paramRequired": true
},
{
"paramName": "o",
"paramLongName": "output",
"paramDescription": "the path of the result DataFrame on HDFS",
"paramRequired": true
},
{
"paramName": "w",
"paramLongName": "workflowId",
"paramDescription": "the identifier of the dnet Workflow",
"paramRequired": true
},
{
"paramName": "tr",
"paramLongName": "transformationRule",
"paramDescription": "the transformation Rule to apply to the input MDStore",
"paramRequired": true
},
{
"paramName": "ru",
"paramLongName": "rabbitUser",
"paramDescription": "the user to connect with RabbitMq for messaging",
"paramRequired": true
},
{
"paramName": "rp",
"paramLongName": "rabbitPassword",
"paramDescription": "the password to connect with RabbitMq for messaging",
"paramRequired": true
},
{
"paramName": "rh",
"paramLongName": "rabbitHost",
"paramDescription": "the host of the RabbitMq server",
"paramRequired": true
},
{
"paramName": "ro",
"paramLongName": "rabbitOngoingQueue",
"paramDescription": "the name of the ongoing queue",
"paramRequired": true
},
{
"paramName": "rr",
"paramLongName": "rabbitReportQueue",
"paramDescription": "the name of the report queue",
"paramRequired": true
},
{
"paramName": "t",
"paramLongName": "isTest",
"paramDescription": "the name of the report queue",
"paramRequired": false
}
] ]


@@ -9,65 +9,60 @@ import java.nio.file.Path;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.io.TempDir;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.model.mdstore.Provenance;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 public class CollectionJobTest {
-private Path testDir;
+private static SparkSession spark;
-@BeforeEach
-public void setup() throws IOException {
-testDir = Files.createTempDirectory("dhp-collection");
+@BeforeAll
+public static void beforeAll() {
+SparkConf conf = new SparkConf();
+conf.setAppName(CollectionJobTest.class.getSimpleName());
+conf.setMaster("local");
+spark = SparkSession.builder().config(conf).getOrCreate();
 }
-@AfterEach
-public void teadDown() throws IOException {
-FileUtils.deleteDirectory(testDir.toFile());
+@AfterAll
+public static void afterAll() {
+spark.stop();
 }
 @Test
-public void tesCollection() throws Exception {
+public void tesCollection(@TempDir Path testDir) throws Exception {
 final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
+Assertions.assertNotNull(new ObjectMapper().writeValueAsString(provenance));
 GenerateNativeStoreSparkJob
 .main(
 new String[] {
-"-mt",
-"local",
-"-w",
-"wid",
-"-e",
-"XML",
-"-d",
-"" + System.currentTimeMillis(),
-"-p",
-new ObjectMapper().writeValueAsString(provenance),
-"-x",
-"./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
-"-i",
-this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
-"-o",
-testDir.toString() + "/store",
-"-t",
-"true",
-"-ru",
-"",
-"-rp",
-"",
-"-rh",
-"",
-"-ro",
-"",
-"-rr",
-""
+"issm", "true",
+"-w", "wid",
+"-e", "XML",
+"-d", "" + System.currentTimeMillis(),
+"-p", new ObjectMapper().writeValueAsString(provenance),
+"-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
+"-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
+"-o", testDir.toString() + "/store",
+"-t", "true",
+"-ru", "",
+"-rp", "",
+"-rh", "",
+"-ro", "",
+"-rr", ""
 });
-System.out.println(new ObjectMapper().writeValueAsString(provenance));
+// TODO introduce useful assertions
 }
 @Test
@@ -85,9 +80,8 @@ public class CollectionJobTest {
 null,
 null);
-assert record != null;
-System.out.println(record.getId());
-System.out.println(record.getOriginalId());
+assertNotNull(record.getId());
+assertNotNull(record.getOriginalId());
 }
 @Test
@@ -112,10 +106,12 @@ public class CollectionJobTest {
 System.currentTimeMillis(),
 null,
 null);
-assert record != null;
 record.setBody("ciao");
-assert record1 != null;
 record1.setBody("mondo");
+assertNotNull(record);
+assertNotNull(record1);
 assertEquals(record, record1);
 }
 }
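The collection test above still carries a "// TODO introduce useful assertions" marker. One possible way to close it, sketched under the assumption that the job wrote a parquet mdstore under testDir + "/store" and that the usual Spark SQL imports (Dataset, Encoder, Encoders) are added to the test class; the helper name is hypothetical:

// Hypothetical helper for CollectionJobTest: verify that the job materialised a non-empty mdstore.
private static void assertMdstoreNotEmpty(final SparkSession spark, final String mdstorePath) {
	final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
	final Dataset<MetadataRecord> stored = spark.read().format("parquet").load(mdstorePath).as(encoder);
	Assertions.assertTrue(stored.count() > 0, "the collection job should write at least one record");
}

It would be called right after the main(...) invocation as assertMdstoreNotEmpty(spark, testDir.toString() + "/store").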


@@ -12,10 +12,14 @@ import java.util.Map;
 import javax.xml.transform.stream.StreamSource;
 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -23,6 +27,7 @@ import org.junit.jupiter.api.io.TempDir;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
+import eu.dnetlib.dhp.collection.CollectionJobTest;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.functions.Cleaner;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;
@@ -33,6 +38,21 @@ import net.sf.saxon.s9api.*;
 @ExtendWith(MockitoExtension.class)
 public class TransformationJobTest {
+private static SparkSession spark;
+@BeforeAll
+public static void beforeAll() {
+SparkConf conf = new SparkConf();
+conf.setAppName(CollectionJobTest.class.getSimpleName());
+conf.setMaster("local");
+spark = SparkSession.builder().config(conf).getOrCreate();
+}
+@AfterAll
+public static void afterAll() {
+spark.stop();
+}
 @Mock
 private LongAccumulator accumulator;
@@ -78,31 +98,21 @@ public class TransformationJobTest {
 TransformSparkJobNode
 .main(
 new String[] {
-"-mt",
-"local",
-"-i",
-mdstore_input,
-"-o",
-mdstore_output,
-"-d",
-"1",
-"-w",
-"1",
-"-tr",
-xslt,
-"-t",
-"true",
-"-ru",
-"",
-"-rp",
-"",
-"-rh",
-"",
-"-ro",
-"",
-"-rr",
-""
+"-issm", "true",
+"-i", mdstore_input,
+"-o", mdstore_output,
+"-d", "1",
+"-w", "1",
+"-tr", xslt,
+"-t", "true",
+"-ru", "",
+"-rp", "",
+"-rh", "",
+"-ro", "",
+"-rr", ""
 });
+// TODO introduce useful assertions
 }
 @Test


@@ -0,0 +1,3 @@
# dhp-broker-events

dhp-broker-events is a DNET module responsible for the production of events for the OpenAIRE Broker Service.


@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId>
<version>1.1.8-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dhp-broker-events</artifactId>
<dependencies>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.11</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-schemas</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
</dependency>
<dependency>
<groupId>dom4j</groupId>
<artifactId>dom4j</artifactId>
</dependency>
<dependency>
<groupId>jaxen</groupId>
<artifactId>jaxen</artifactId>
</dependency>
<dependency>
<groupId>eu.dnetlib</groupId>
<artifactId>dnet-openaire-broker-common</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
</dependencies>
</project>


@@ -0,0 +1,104 @@
package eu.dnetlib.dhp.broker.model;
import java.util.Map;
public class Event {
private String eventId;
private String producerId;
private String topic;
private String payload;
private Long creationDate;
private Long expiryDate;
private boolean instantMessage;
private Map<String, Object> map;
public Event() {
}
public Event(final String producerId, final String eventId, final String topic, final String payload,
final Long creationDate, final Long expiryDate,
final boolean instantMessage,
final Map<String, Object> map) {
this.producerId = producerId;
this.eventId = eventId;
this.topic = topic;
this.payload = payload;
this.creationDate = creationDate;
this.expiryDate = expiryDate;
this.instantMessage = instantMessage;
this.map = map;
}
public String getProducerId() {
return this.producerId;
}
public void setProducerId(final String producerId) {
this.producerId = producerId;
}
public String getEventId() {
return this.eventId;
}
public void setEventId(final String eventId) {
this.eventId = eventId;
}
public String getTopic() {
return this.topic;
}
public void setTopic(final String topic) {
this.topic = topic;
}
public String getPayload() {
return this.payload;
}
public void setPayload(final String payload) {
this.payload = payload;
}
public Long getCreationDate() {
return this.creationDate;
}
public void setCreationDate(final Long creationDate) {
this.creationDate = creationDate;
}
public Long getExpiryDate() {
return this.expiryDate;
}
public void setExpiryDate(final Long expiryDate) {
this.expiryDate = expiryDate;
}
public boolean isInstantMessage() {
return this.instantMessage;
}
public void setInstantMessage(final boolean instantMessage) {
this.instantMessage = instantMessage;
}
public Map<String, Object> getMap() {
return this.map;
}
public void setMap(final Map<String, Object> map) {
this.map = map;
}
}


@@ -0,0 +1,140 @@
package eu.dnetlib.dhp.broker.model;
import java.text.ParseException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
public class EventFactory {
private final static String PRODUCER_ID = "OpenAIRE";
private static final int TTH_DAYS = 365;
private final static String[] DATE_PATTERNS = {
"yyyy-MM-dd"
};
public static Event newBrokerEvent(final Result source, final Result target, final UpdateInfo<?> updateInfo) {
final long now = new Date().getTime();
final Event res = new Event();
final Map<String, Object> map = createMapFromResult(target, source, updateInfo);
final String payload = createPayload(target, updateInfo);
final String eventId = calculateEventId(
updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
res.setEventId(eventId);
res.setProducerId(PRODUCER_ID);
res.setPayload(payload);
res.setMap(map);
res.setTopic(updateInfo.getTopic());
res.setCreationDate(now);
res.setExpiryDate(calculateExpiryDate(now));
res.setInstantMessage(false);
return res;
}
private static String createPayload(final Result result, final UpdateInfo<?> updateInfo) {
final OpenAireEventPayload payload = new OpenAireEventPayload();
// TODO
updateInfo.compileHighlight(payload);
return payload.toJSON();
}
private static Map<String, Object> createMapFromResult(final Result oaf, final Result source,
final UpdateInfo<?> updateInfo) {
final Map<String, Object> map = new HashMap<>();
final List<KeyValue> collectedFrom = oaf.getCollectedfrom();
if (collectedFrom.size() == 1) {
map.put("target_datasource_id", collectedFrom.get(0).getKey());
map.put("target_datasource_name", collectedFrom.get(0).getValue());
}
final List<String> ids = oaf.getOriginalId();
if (ids.size() > 0) {
map.put("target_publication_id", ids.get(0));
}
final List<StructuredProperty> titles = oaf.getTitle();
if (titles.size() > 0) {
map.put("target_publication_title", titles.get(0));
}
final long date = parseDateTolong(oaf.getDateofacceptance().getValue());
if (date > 0) {
map.put("target_dateofacceptance", date);
}
final List<StructuredProperty> subjects = oaf.getSubject();
if (subjects.size() > 0) {
map
.put(
"target_publication_subject_list",
subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
}
final List<Author> authors = oaf.getAuthor();
if (authors.size() > 0) {
map
.put(
"target_publication_author_list",
authors.stream().map(Author::getFullname).collect(Collectors.toList()));
}
// PROVENANCE INFO
map.put("trust", updateInfo.getTrust());
final List<KeyValue> sourceCollectedFrom = source.getCollectedfrom();
if (sourceCollectedFrom.size() == 1) {
map.put("provenance_datasource_id", sourceCollectedFrom.get(0).getKey());
map.put("provenance_datasource_name", sourceCollectedFrom.get(0).getValue());
}
map.put("provenance_publication_id_list", source.getOriginalId());
return map;
}
private static String calculateEventId(final String topic, final String publicationId, final String value) {
return "event-"
+ DigestUtils.md5Hex(topic).substring(0, 6) + "-"
+ DigestUtils.md5Hex(publicationId).substring(0, 8) + "-"
+ DigestUtils.md5Hex(value).substring(0, 8);
}
private static long calculateExpiryDate(final long now) {
return now + TTH_DAYS * 24 * 60 * 60 * 1000;
}
private static long parseDateTolong(final String date) {
if (StringUtils.isBlank(date)) {
return -1;
}
try {
return DateUtils.parseDate(date, DATE_PATTERNS).getTime();
} catch (final ParseException e) {
return -1;
}
}
}
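A short illustrative example of how a caller might turn an enrichment into a broker event with the factory above. The UpdateInfo subclasses in this commit still return empty lists, so this is a sketch only; serialising the Event with Jackson is an assumption, not something the factory mandates:

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.broker.model.Event;
import eu.dnetlib.dhp.broker.model.EventFactory;
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EventFactorySketch {
	// source: the record providing the new information, target: the record to be enriched.
	public static String toJson(Result source, Result target, UpdateInfo<?> info) throws Exception {
		Event event = EventFactory.newBrokerEvent(source, target, info);
		return new ObjectMapper().writeValueAsString(event);
	}
}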


@@ -0,0 +1,112 @@
package eu.dnetlib.dhp.broker.oa;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.broker.model.Event;
import eu.dnetlib.dhp.broker.model.EventFactory;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAbstract;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAuthorOrcid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingOpenAccess;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingProject;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPublicationDate;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingSubject;
import eu.dnetlib.dhp.broker.oa.util.EnrichMoreOpenAccess;
import eu.dnetlib.dhp.broker.oa.util.EnrichMorePid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMoreSubject;
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.oaf.Result;
public class GenerateEventsApplication {
private static final Logger log = LoggerFactory.getLogger(GenerateEventsApplication.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static void main(final String[] args) throws Exception {
final ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
GenerateEventsApplication.class
.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
parser.parseArgument(args);
final Boolean isSparkSessionManaged = Optional
.ofNullable(parser.get("isSparkSessionManaged"))
.map(Boolean::valueOf)
.orElse(Boolean.TRUE);
log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
final String graphPath = parser.get("graphPath");
log.info("graphPath: {}", graphPath);
final String eventsPath = parser.get("eventsPath");
log.info("eventsPath: {}", eventsPath);
final SparkConf conf = new SparkConf();
runWithSparkSession(conf, isSparkSessionManaged, spark -> {
removeOutputDir(spark, eventsPath);
generateEvents(spark, graphPath, eventsPath);
});
}
private static void removeOutputDir(final SparkSession spark, final String path) {
HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
}
private static void generateEvents(final SparkSession spark, final String graphPath, final String eventsPath) {
// TODO
}
private List<Event> generateEvents(final Result... children) {
final List<Event> list = new ArrayList<>();
for (final Result source : children) {
for (final Result target : children) {
if (source != target) {
list
.addAll(
findUpdates(source, target)
.stream()
.map(info -> EventFactory.newBrokerEvent(source, target, info))
.collect(Collectors.toList()));
}
}
}
return list;
}
private List<UpdateInfo<?>> findUpdates(final Result source, final Result target) {
final List<UpdateInfo<?>> list = new ArrayList<>();
list.addAll(EnrichMissingAbstract.findUpdates(source, target));
list.addAll(EnrichMissingAuthorOrcid.findUpdates(source, target));
list.addAll(EnrichMissingOpenAccess.findUpdates(source, target));
list.addAll(EnrichMissingPid.findUpdates(source, target));
list.addAll(EnrichMissingProject.findUpdates(source, target));
list.addAll(EnrichMissingPublicationDate.findUpdates(source, target));
list.addAll(EnrichMissingSubject.findUpdates(source, target));
list.addAll(EnrichMoreOpenAccess.findUpdates(source, target));
list.addAll(EnrichMorePid.findUpdates(source, target));
list.addAll(EnrichMoreSubject.findUpdates(source, target));
return list;
}
}
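generateEvents(spark, graphPath, eventsPath) is still a TODO in the class above. Once a java.util.List of Event objects has been produced, for instance via the private generateEvents(Result...) helper, persisting it could look like the following sketch; the method name is hypothetical, the grouping of duplicate results is deliberately left out, and JSON-per-line output is only one possible choice:

package eu.dnetlib.dhp.broker.oa;

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.broker.model.Event;

public class EventWriterSketch {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	// Hedged sketch: persist a batch of events as JSON text lines under eventsPath.
	static void writeEvents(final SparkSession spark, final List<Event> events, final String eventsPath)
		throws Exception {
		final List<String> json = new ArrayList<>();
		for (final Event e : events) {
			json.add(OBJECT_MAPPER.writeValueAsString(e));
		}
		spark.createDataset(json, Encoders.STRING()).write().mode(SaveMode.Append).text(eventsPath);
	}
}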


@@ -0,0 +1,31 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingAbstract extends UpdateInfo<String> {
public static List<EnrichMissingAbstract> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingAbstract(final String highlightValue, final float trust) {
super("ENRICH/MISSING/ABSTRACT", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getAbstracts().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue();
}
}
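All the Enrich* classes added in this commit extend UpdateInfo<T>, which is not included in the hunks shown here. The shape implied by how it is used (a constructor taking topic, highlight value and trust, the getters called by EventFactory, and the two members overridden above) is sketched below; this is an assumption about the base class, not its actual source:

package eu.dnetlib.dhp.broker.oa.util;

import eu.dnetlib.broker.objects.OpenAireEventPayload;

// Assumed shape of the base class used by the Enrich* helpers.
public abstract class UpdateInfoSketch<T> {

	private final String topic;
	private final T highlightValue;
	private final float trust;

	protected UpdateInfoSketch(final String topic, final T highlightValue, final float trust) {
		this.topic = topic;
		this.highlightValue = highlightValue;
		this.trust = trust;
	}

	public String getTopic() {
		return topic;
	}

	public T getHighlightValue() {
		return highlightValue;
	}

	public float getTrust() {
		return trust;
	}

	// Each concrete update adds its highlight to the event payload...
	public abstract void compileHighlight(OpenAireEventPayload payload);

	// ...and renders the highlight as a string used in the event id calculation.
	public abstract String getHighlightValueAsString();
}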


@@ -0,0 +1,31 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingAuthorOrcid extends UpdateInfo<String> {
public static List<EnrichMissingAuthorOrcid> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingAuthorOrcid(final String highlightValue, final float trust) {
super("ENRICH/MISSING/AUTHOR/ORCID", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
// TODO
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue();
}
}

View File

@ -0,0 +1,32 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingOpenAccess extends UpdateInfo<Instance> {
public static List<EnrichMissingOpenAccess> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingOpenAccess(final Instance highlightValue, final float trust) {
super("ENRICH/MISSING/OPENACCESS_VERSION", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getInstances().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue().getUrl();
}
}

View File

@ -0,0 +1,32 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingPid extends UpdateInfo<Pid> {
public static List<EnrichMissingPid> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingPid(final Pid highlightValue, final float trust) {
super("ENRICH/MISSING/PID", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getPids().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
}
}

View File

@ -0,0 +1,33 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Project;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingProject extends UpdateInfo<Project> {
public static List<EnrichMissingProject> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingProject(final Project highlightValue, final float trust) {
super("ENRICH/MISSING/PROJECT", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getProjects().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram() + "::"
+ getHighlightValue().getCode();
}
}

View File

@ -0,0 +1,31 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingPublicationDate extends UpdateInfo<String> {
public static List<EnrichMissingPublicationDate> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMissingPublicationDate(final String highlightValue, final float trust) {
super("ENRICH/MISSING/PUBLICATION_DATE", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().setPublicationdate(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue();
}
}

View File

@ -0,0 +1,36 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMissingSubject extends UpdateInfo<String> {
public static List<EnrichMissingSubject> findUpdates(final Result source, final Result target) {
// MESHEUROPMC
// ARXIV
// JEL
// DDC
// ACM
return Arrays.asList();
}
private EnrichMissingSubject(final String subjectClassification, final String highlightValue, final float trust) {
super("ENRICH/MISSING/SUBJECT/" + subjectClassification, highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getSubjects().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue();
}
}

View File

@ -0,0 +1,32 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMoreOpenAccess extends UpdateInfo<Instance> {
public static List<EnrichMoreOpenAccess> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMoreOpenAccess(final Instance highlightValue, final float trust) {
super("ENRICH/MORE/OPENACCESS_VERSION", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getInstances().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue().getUrl();
}
}

View File

@ -0,0 +1,32 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMorePid extends UpdateInfo<Pid> {
public static List<EnrichMorePid> findUpdates(final Result source, final Result target) {
// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
return Arrays.asList();
}
private EnrichMorePid(final Pid highlightValue, final float trust) {
super("ENRICH/MORE/PID", highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getPids().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
}
}

View File

@ -0,0 +1,36 @@
package eu.dnetlib.dhp.broker.oa.util;
import java.util.Arrays;
import java.util.List;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;
public class EnrichMoreSubject extends UpdateInfo<String> {
public static List<EnrichMoreSubject> findUpdates(final Result source, final Result target) {
// MESHEUROPMC
// ARXIV
// JEL
// DDC
// ACM
return Arrays.asList();
}
private EnrichMoreSubject(final String subjectClassification, final String highlightValue, final float trust) {
super("ENRICH/MORE/SUBJECT/" + subjectClassification, highlightValue, trust);
}
@Override
public void compileHighlight(final OpenAireEventPayload payload) {
payload.getHighlight().getSubjects().add(getHighlightValue());
}
@Override
public String getHighlightValueAsString() {
return getHighlightValue();
}
}

View File

@ -0,0 +1,36 @@
package eu.dnetlib.dhp.broker.oa.util;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
public abstract class UpdateInfo<T> {
private final String topic;
private final T highlightValue;
private final float trust;
protected UpdateInfo(final String topic, final T highlightValue, final float trust) {
this.topic = topic;
this.highlightValue = highlightValue;
this.trust = trust;
}
public T getHighlightValue() {
return highlightValue;
}
public float getTrust() {
return trust;
}
public String getTopic() {
return topic;
}
abstract public void compileHighlight(OpenAireEventPayload payload);
abstract public String getHighlightValueAsString();
}
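
UpdateInfo is the contract shared by all the Enrich* classes above: a topic string identifying the event type, a typed highlight value carrying the proposed enrichment, and a trust score. A minimal hypothetical subclass, shown only to make that contract explicit (not part of the commit):

// Hypothetical subclass of UpdateInfo<T>; the topic and behaviour are illustrative only.
// Assumes the same package as UpdateInfo and an import of eu.dnetlib.broker.objects.OpenAireEventPayload.
public class EnrichExample extends UpdateInfo<String> {

	protected EnrichExample(final String highlightValue, final float trust) {
		super("ENRICH/EXAMPLE", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		// each concrete class decides where in the payload its highlight value is written
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}
}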

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>dhp-dedup-openaire</artifactId> <artifactId>dhp-dedup-openaire</artifactId>

View File

@ -137,10 +137,14 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
} }
private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) { private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) {
String entityType = dedupConf.getWf().getEntityType();
Relation r = new Relation(); Relation r = new Relation();
r.setSource(source); r.setSource(source);
r.setTarget(target); r.setTarget(target);
r.setRelClass(relClass); r.setRelClass(relClass);
r.setRelType(entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1));
r.setSubRelType("dedup"); r.setSubRelType("dedup");
DataInfo info = new DataInfo(); DataInfo info = new DataInfo();

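Reading aid for the added relType line (not part of the commit): the value is the entity type concatenated with its capitalized form.

// Example of the string construction above:
// "result" -> "resultResult", "organization" -> "organizationOrganization".
String entityType = "result";
String relType = entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1);
// relType == "resultResult"
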
View File

@ -86,7 +86,8 @@ public class SparkPropagateRelation extends AbstractSparkAction {
mergedIds, mergedIds,
FieldType.TARGET, FieldType.TARGET,
getFixRelFn(FieldType.TARGET)) getFixRelFn(FieldType.TARGET))
.filter(SparkPropagateRelation::containsDedup); .filter(SparkPropagateRelation::containsDedup)
.distinct();
Dataset<Relation> updated = processDataset( Dataset<Relation> updated = processDataset(
processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()), processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()),
@ -94,7 +95,7 @@ public class SparkPropagateRelation extends AbstractSparkAction {
FieldType.TARGET, FieldType.TARGET,
getDeletedFn()); getDeletedFn());
save(newRels.union(updated), outputRelationPath, SaveMode.Overwrite); save(newRels.union(updated).union(mergeRels), outputRelationPath, SaveMode.Overwrite);
} }
private static Dataset<Relation> processDataset( private static Dataset<Relation> processDataset(

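Reading aid (assumes a live SparkSession named spark and hypothetical identifiers, not part of the commit): the added .distinct() collapses relations that become identical once both endpoints are rewritten to the same dedup representative, which is also what the new testUniqueness check below exercises.

// Sketch only: two relations with equal source/target/relClass reduce to one row.
Relation r1 = new Relation();
r1.setSource("50|dedup_wf_001::aaa");
r1.setTarget("50|dedup_wf_001::bbb");
r1.setRelClass("isRelatedTo");

Relation r2 = new Relation();
r2.setSource("50|dedup_wf_001::aaa");
r2.setTarget("50|dedup_wf_001::bbb");
r2.setRelClass("isRelatedTo");

Dataset<Relation> rels = spark.createDataset(Arrays.asList(r1, r2), Encoders.bean(Relation.class));
long total = rels.count();             // 2
long unique = rels.distinct().count(); // 1
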
View File

@ -75,12 +75,20 @@
</configuration> </configuration>
</global> </global>
<start to="CreateSimRel"/> <start to="resetWorkingPath"/>
<kill name="Kill"> <kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill> </kill>
<action name="resetWorkingPath">
<fs>
<delete path="${workingPath}"/>
</fs>
<ok to="CreateSimRel"/>
<error to="Kill"/>
</action>
<action name="CreateSimRel"> <action name="CreateSimRel">
<spark xmlns="uri:oozie:spark-action:0.2"> <spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master> <master>yarn</master>

View File

@ -18,6 +18,7 @@ import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.PairFunction; import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders; import org.apache.spark.sql.Encoders;
@ -29,6 +30,8 @@ import org.mockito.Mock;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoExtension;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser; import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException; import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
@ -420,7 +423,7 @@ public class SparkDedupTest implements Serializable {
long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count(); long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count();
assertEquals(826, relations); assertEquals(5022, relations);
// check deletedbyinference // check deletedbyinference
final Dataset<Relation> mergeRels = spark final Dataset<Relation> mergeRels = spark
@ -450,6 +453,25 @@ public class SparkDedupTest implements Serializable {
assertEquals(updated, deletedbyinference); assertEquals(updated, deletedbyinference);
} }
@Test
@Order(6)
public void testRelations() throws Exception {
testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_1.json", 12, 10);
testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_2.json", 10, 2);
}
private void testUniqueness(String path, int expected_total, int expected_unique) {
Dataset<Relation> rel = spark
.read()
.textFile(getClass().getResource(path).getPath())
.map(
(MapFunction<String, Relation>) s -> new ObjectMapper().readValue(s, Relation.class),
Encoders.bean(Relation.class));
assertEquals(expected_total, rel.count());
assertEquals(expected_unique, rel.distinct().count());
}
@AfterAll @AfterAll
public static void finalCleanUp() throws IOException { public static void finalCleanUp() throws IOException {
FileUtils.deleteDirectory(new File(testOutputBasePath)); FileUtils.deleteDirectory(new File(testOutputBasePath));

View File

@ -0,0 +1,12 @@
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::40c7b1dfa18c3693d374dafd21ef852f","subRelType":"provision","target":"10|doajarticles::618df40624078491acfd93ca3ff6921c"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::0b4e756a73338f60b84de98d080f6422","subRelType":"provision","target":"10|doajarticles::6d01e689db13b6977b411f4170b6143b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::fe2f7c9d350b9c5aa658ec384d761e33","subRelType":"provision","target":"10|doajarticles::9b8a956b0703854ba79e52ddf7dc552e"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::a116734108ba011ef715b012f095e3f5","subRelType":"provision","target":"10|doajarticles::c5de04b1a35da2cc4468e299bc9ffa16"}
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::8b83abbbcad5496fe43cda88d0045aa4","subRelType":"provision","target":"10|opendoar____::6855456e2fe46a9d49d3d3af4f57443d"}
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::88034de0247d9d36e22783e9319c5ba3","subRelType":"provision","target":"10|opendoar____::c17028c9b6e0c5deaad29665d582284a"}
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::dfb21c796f33e9acf505cc960a3d8d2c","subRelType":"provision","target":"10|opendoar____::dfa037a53e121ecc9e0926800c3e814e"}
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::b526b1aa1562038881a31be59896985f","subRelType":"provision","target":"10|re3data_____::2e457773b62df3534cc04441bf406a70"}
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::6b306183bc051b5aaa5376f2fab6e6e5","subRelType":"provision","target":"10|re3data_____::6371ff9ee1ec7073416cb83c868b10a3"}
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}

View File

@ -0,0 +1,10 @@
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681629"}

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -24,7 +25,6 @@ import org.dom4j.DocumentFactory;
import org.dom4j.DocumentHelper; import org.dom4j.DocumentHelper;
import org.dom4j.Node; import org.dom4j.Node;
import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.Context; import eu.dnetlib.dhp.schema.oaf.Context;
import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.DataInfo;
@ -48,6 +48,21 @@ public abstract class AbstractMdRecordToOafMapper {
protected final Map<String, String> code2name; protected final Map<String, String> code2name;
protected static final String DATACITE_SCHEMA_KERNEL_4 = "http://datacite.org/schema/kernel-4";
protected static final String DATACITE_SCHEMA_KERNEL_3 = "http://datacite.org/schema/kernel-3";
protected static final Map<String, String> nsContext = new HashMap<>();
static {
nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
nsContext.put("datacite", DATACITE_SCHEMA_KERNEL_3);
}
protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier( protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier(
"main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title"); "main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");
@ -57,31 +72,27 @@ public abstract class AbstractMdRecordToOafMapper {
public List<Oaf> processMdRecord(final String xml) { public List<Oaf> processMdRecord(final String xml) {
try { try {
final Map<String, String> nsContext = new HashMap<>();
nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext); DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);
final Document doc = DocumentHelper final Document doc = DocumentHelper
.parseText( .parseText(
xml xml.replaceAll(DATACITE_SCHEMA_KERNEL_4, DATACITE_SCHEMA_KERNEL_3));
.replaceAll(
"http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
final String type = doc.valueOf("//dr:CobjCategory/@type"); final String type = doc.valueOf("//dr:CobjCategory/@type");
final KeyValue collectedFrom = keyValue( final KeyValue collectedFrom = getProvenanceDatasource(
createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true), doc, "//oaf:collectedFrom/@id", "//oaf:collectedFrom/@name");
doc.valueOf("//oaf:collectedFrom/@name"));
if (collectedFrom == null) {
return null;
}
final KeyValue hostedBy = StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id")) final KeyValue hostedBy = StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
? collectedFrom ? collectedFrom
: keyValue( : getProvenanceDatasource(doc, "//oaf:hostedBy/@id", "//oaf:hostedBy/@name");
createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
doc.valueOf("//oaf:hostedBy/@name")); if (hostedBy == null) {
return null;
}
final DataInfo info = prepareDataInfo(doc); final DataInfo info = prepareDataInfo(doc);
final long lastUpdateTimestamp = new Date().getTime(); final long lastUpdateTimestamp = new Date().getTime();
@ -92,6 +103,19 @@ public abstract class AbstractMdRecordToOafMapper {
} }
} }
private KeyValue getProvenanceDatasource(Document doc, String xpathId, String xpathName) {
final String dsId = doc.valueOf(xpathId);
final String dsName = doc.valueOf(xpathName);
if (StringUtils.isBlank(dsId) || StringUtils.isBlank(dsName)) {
return null;
}
return keyValue(
createOpenaireId(10, dsId, true),
dsName);
}
protected List<Oaf> createOafs( protected List<Oaf> createOafs(
final Document doc, final Document doc,
final String type, final String type,
@ -107,14 +131,14 @@ public abstract class AbstractMdRecordToOafMapper {
case "publication": case "publication":
final Publication p = new Publication(); final Publication p = new Publication();
populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER); p.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
p.setJournal(prepareJournal(doc, info)); p.setJournal(prepareJournal(doc, info));
oafs.add(p); oafs.add(p);
break; break;
case "dataset": case "dataset":
final Dataset d = new Dataset(); final Dataset d = new Dataset();
populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER); d.setResulttype(DATASET_DEFAULT_RESULTTYPE);
d.setStoragedate(prepareDatasetStorageDate(doc, info)); d.setStoragedate(prepareDatasetStorageDate(doc, info));
d.setDevice(prepareDatasetDevice(doc, info)); d.setDevice(prepareDatasetDevice(doc, info));
d.setSize(prepareDatasetSize(doc, info)); d.setSize(prepareDatasetSize(doc, info));
@ -127,7 +151,7 @@ public abstract class AbstractMdRecordToOafMapper {
case "software": case "software":
final Software s = new Software(); final Software s = new Software();
populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER); s.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info)); s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
s.setLicense(prepareSoftwareLicenses(doc, info)); s.setLicense(prepareSoftwareLicenses(doc, info));
s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info)); s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
@ -138,7 +162,7 @@ public abstract class AbstractMdRecordToOafMapper {
default: default:
final OtherResearchProduct o = new OtherResearchProduct(); final OtherResearchProduct o = new OtherResearchProduct();
populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER); o.setResulttype(ORP_DEFAULT_RESULTTYPE);
o.setContactperson(prepareOtherResearchProductContactPersons(doc, info)); o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info)); o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
o.setTool(prepareOtherResearchProductTools(doc, info)); o.setTool(prepareOtherResearchProductTools(doc, info));
@ -171,33 +195,36 @@ public abstract class AbstractMdRecordToOafMapper {
if (StringUtils.isNotBlank(originalId)) { if (StringUtils.isNotBlank(originalId)) {
final String projectId = createOpenaireId(40, originalId, true); final String projectId = createOpenaireId(40, originalId, true);
final Relation r1 = new Relation(); res
r1.setRelType("resultProject"); .add(
r1.setSubRelType("outcome"); getRelation(
r1.setRelClass("isProducedBy"); docId, projectId, RESULT_PROJECT, OUTCOME, IS_PRODUCED_BY, collectedFrom, info,
r1.setSource(docId); lastUpdateTimestamp));
r1.setTarget(projectId); res
r1.setCollectedfrom(Arrays.asList(collectedFrom)); .add(
r1.setDataInfo(info); getRelation(
r1.setLastupdatetimestamp(lastUpdateTimestamp); projectId, docId, RESULT_PROJECT, OUTCOME, PRODUCES, collectedFrom, info,
res.add(r1); lastUpdateTimestamp));
final Relation r2 = new Relation();
r2.setRelType("resultProject");
r2.setSubRelType("outcome");
r2.setRelClass("produces");
r2.setSource(projectId);
r2.setTarget(docId);
r2.setCollectedfrom(Arrays.asList(collectedFrom));
r2.setDataInfo(info);
r2.setLastupdatetimestamp(lastUpdateTimestamp);
res.add(r2);
} }
} }
return res; return res;
} }
protected Relation getRelation(String source, String target, String relType, String subRelType, String relClass,
KeyValue collectedFrom, DataInfo info, long lastUpdateTimestamp) {
final Relation rel = new Relation();
rel.setRelType(relType);
rel.setSubRelType(subRelType);
rel.setRelClass(relClass);
rel.setSource(source);
rel.setTarget(target);
rel.setCollectedfrom(Arrays.asList(collectedFrom));
rel.setDataInfo(info);
rel.setLastupdatetimestamp(lastUpdateTimestamp);
return rel;
}
protected abstract List<Oaf> addOtherResultRels( protected abstract List<Oaf> addOtherResultRels(
final Document doc, final Document doc,
final KeyValue collectedFrom, final KeyValue collectedFrom,
@ -423,7 +450,7 @@ public abstract class AbstractMdRecordToOafMapper {
if (n == null) { if (n == null) {
return dataInfo( return dataInfo(
false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9"); false, null, false, false, REPOSITORY_PROVENANCE_ACTIONS, "0.9");
} }
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid"); final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");

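A hedged sketch of the new getProvenanceDatasource guard (hypothetical values): a record whose collectedFrom carries an id but a blank name now yields a null KeyValue, so processMdRecord returns null and the record is dropped by the .filter(Objects::nonNull) added to GenerateEntitiesApplication further down.

// Sketch only, hypothetical values; keyValue and createOpenaireId are the statically imported helpers used above.
String dsId = "opendoar____::1234"; // value of //oaf:collectedFrom/@id
String dsName = "";                 // //oaf:collectedFrom/@name is blank
KeyValue collectedFrom = StringUtils.isBlank(dsId) || StringUtils.isBlank(dsName)
	? null
	: keyValue(createOpenaireId(10, dsId, true), dsName);
// collectedFrom == null, so this record is skipped
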
View File

@ -95,6 +95,7 @@ public class GenerateEntitiesApplication {
.sequenceFile(sp, Text.class, Text.class) .sequenceFile(sp, Text.class, Text.class)
.map(k -> new Tuple2<>(k._1().toString(), k._2().toString())) .map(k -> new Tuple2<>(k._1().toString(), k._2().toString()))
.map(k -> convertToListOaf(k._1(), k._2(), code2name)) .map(k -> convertToListOaf(k._1(), k._2(), code2name))
.filter(Objects::nonNull)
.flatMap(list -> list.iterator())); .flatMap(list -> list.iterator()));
} }

View File

@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
@ -31,7 +32,6 @@ import org.apache.commons.logging.LogFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser; import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication; import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
import eu.dnetlib.dhp.oa.graph.raw.common.DbClient; import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
import eu.dnetlib.dhp.schema.oaf.Context; import eu.dnetlib.dhp.schema.oaf.Context;
import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Dataset; import eu.dnetlib.dhp.schema.oaf.Dataset;
@ -55,6 +55,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class); private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class);
public static final String SOURCE_TYPE = "source_type";
public static final String TARGET_TYPE = "target_type";
private final DbClient dbClient; private final DbClient dbClient;
private final long lastUpdateTimestamp; private final long lastUpdateTimestamp;
@ -304,9 +307,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
final Relation r1 = new Relation(); final Relation r1 = new Relation();
r1.setRelType("datasourceOrganization"); r1.setRelType(DATASOURCE_ORGANIZATION);
r1.setSubRelType("provision"); r1.setSubRelType(PROVISION);
r1.setRelClass("isProvidedBy"); r1.setRelClass(IS_PROVIDED_BY);
r1.setSource(dsId); r1.setSource(dsId);
r1.setTarget(orgId); r1.setTarget(orgId);
r1.setCollectedfrom(collectedFrom); r1.setCollectedfrom(collectedFrom);
@ -314,9 +317,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
r1.setLastupdatetimestamp(lastUpdateTimestamp); r1.setLastupdatetimestamp(lastUpdateTimestamp);
final Relation r2 = new Relation(); final Relation r2 = new Relation();
r2.setRelType("datasourceOrganization"); r2.setRelType(DATASOURCE_ORGANIZATION);
r2.setSubRelType("provision"); r2.setSubRelType(PROVISION);
r2.setRelClass("provides"); r2.setRelClass(PROVIDES);
r2.setSource(orgId); r2.setSource(orgId);
r2.setTarget(dsId); r2.setTarget(dsId);
r2.setCollectedfrom(collectedFrom); r2.setCollectedfrom(collectedFrom);
@ -338,9 +341,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
final Relation r1 = new Relation(); final Relation r1 = new Relation();
r1.setRelType("projectOrganization"); r1.setRelType(PROJECT_ORGANIZATION);
r1.setSubRelType("participation"); r1.setSubRelType(PARTICIPATION);
r1.setRelClass("hasParticipant"); r1.setRelClass(HAS_PARTICIPANT);
r1.setSource(projectId); r1.setSource(projectId);
r1.setTarget(orgId); r1.setTarget(orgId);
r1.setCollectedfrom(collectedFrom); r1.setCollectedfrom(collectedFrom);
@ -348,9 +351,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
r1.setLastupdatetimestamp(lastUpdateTimestamp); r1.setLastupdatetimestamp(lastUpdateTimestamp);
final Relation r2 = new Relation(); final Relation r2 = new Relation();
r2.setRelType("projectOrganization"); r2.setRelType(PROJECT_ORGANIZATION);
r2.setSubRelType("participation"); r2.setSubRelType(PARTICIPATION);
r2.setRelClass("isParticipant"); r2.setRelClass(IS_PARTICIPANT);
r2.setSource(orgId); r2.setSource(orgId);
r2.setTarget(projectId); r2.setTarget(projectId);
r2.setCollectedfrom(collectedFrom); r2.setCollectedfrom(collectedFrom);
@ -367,28 +370,28 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
final DataInfo info = dataInfo( final DataInfo info = dataInfo(
false, null, false, false, false, null, false, false,
qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9"); qualifier(USER_CLAIM, USER_CLAIM, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), "0.9");
final List<KeyValue> collectedFrom = listKeyValues( final List<KeyValue> collectedFrom = listKeyValues(
createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE"); createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
try { try {
if (rs.getString("source_type").equals("context")) { if (rs.getString(SOURCE_TYPE).equals("context")) {
final Result r; final Result r;
if (rs.getString("target_type").equals("dataset")) { if (rs.getString(TARGET_TYPE).equals("dataset")) {
r = new Dataset(); r = new Dataset();
r.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER); r.setResulttype(DATASET_DEFAULT_RESULTTYPE);
} else if (rs.getString("target_type").equals("software")) { } else if (rs.getString(TARGET_TYPE).equals("software")) {
r = new Software(); r = new Software();
r.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER); r.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
} else if (rs.getString("target_type").equals("other")) { } else if (rs.getString(TARGET_TYPE).equals("other")) {
r = new OtherResearchProduct(); r = new OtherResearchProduct();
r.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER); r.setResulttype(ORP_DEFAULT_RESULTTYPE);
} else { } else {
r = new Publication(); r = new Publication();
r.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER); r.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
} }
r.setId(createOpenaireId(50, rs.getString("target_id"), false)); r.setId(createOpenaireId(50, rs.getString("target_id"), false));
r.setLastupdatetimestamp(lastUpdateTimestamp); r.setLastupdatetimestamp(lastUpdateTimestamp);
@ -398,32 +401,32 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
return Arrays.asList(r); return Arrays.asList(r);
} else { } else {
final String sourceId = createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false); final String sourceId = createOpenaireId(rs.getString(SOURCE_TYPE), rs.getString("source_id"), false);
final String targetId = createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false); final String targetId = createOpenaireId(rs.getString(TARGET_TYPE), rs.getString("target_id"), false);
final Relation r1 = new Relation(); final Relation r1 = new Relation();
final Relation r2 = new Relation(); final Relation r2 = new Relation();
if (rs.getString("source_type").equals("project")) { if (rs.getString(SOURCE_TYPE).equals("project")) {
r1.setCollectedfrom(collectedFrom); r1.setCollectedfrom(collectedFrom);
r1.setRelType("resultProject"); r1.setRelType(RESULT_PROJECT);
r1.setSubRelType("outcome"); r1.setSubRelType(OUTCOME);
r1.setRelClass("produces"); r1.setRelClass(PRODUCES);
r2.setCollectedfrom(collectedFrom); r2.setCollectedfrom(collectedFrom);
r2.setRelType("resultProject"); r2.setRelType(RESULT_PROJECT);
r2.setSubRelType("outcome"); r2.setSubRelType(OUTCOME);
r2.setRelClass("isProducedBy"); r2.setRelClass(IS_PRODUCED_BY);
} else { } else {
r1.setCollectedfrom(collectedFrom); r1.setCollectedfrom(collectedFrom);
r1.setRelType("resultResult"); r1.setRelType(RESULT_RESULT);
r1.setSubRelType("relationship"); r1.setSubRelType(RELATIONSHIP);
r1.setRelClass("isRelatedTo"); r1.setRelClass(IS_RELATED_TO);
r2.setCollectedfrom(collectedFrom); r2.setCollectedfrom(collectedFrom);
r2.setRelType("resultResult"); r2.setRelType(RESULT_RESULT);
r2.setSubRelType("relationship"); r2.setSubRelType(RELATIONSHIP);
r2.setRelClass("isRelatedTo"); r2.setRelClass(IS_RELATED_TO);
} }
r1.setSource(sourceId); r1.setSource(sourceId);
@ -457,8 +460,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
final Boolean inferred = rs.getBoolean("inferred"); final Boolean inferred = rs.getBoolean("inferred");
final String trust = rs.getString("trust"); final String trust = rs.getString("trust");
return dataInfo( return dataInfo(
deletedbyinference, inferenceprovenance, inferred, false, deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust);
MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust);
} }
private Qualifier prepareQualifierSplitting(final String s) { private Qualifier prepareQualifierSplitting(final String s) {
@ -514,9 +516,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
if (arr.length == 3) { if (arr.length == 3) {
final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0].trim() : null; final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0].trim() : null;
final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null; final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null;
;
final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null; final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;
;
if (issn != null || eissn != null || lissn != null) { if (issn != null || eissn != null || lissn != null) {
return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info); return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
} }

View File

@ -3,27 +3,19 @@ package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays; import java.util.stream.Collectors;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document; import org.dom4j.Document;
import org.dom4j.Node; import org.dom4j.Node;
import com.google.common.collect.Lists;
import eu.dnetlib.dhp.oa.graph.raw.common.PacePerson; import eu.dnetlib.dhp.oa.graph.raw.common.PacePerson;
import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.GeoLocation;
import eu.dnetlib.dhp.schema.oaf.Instance;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
public class OafToOafMapper extends AbstractMdRecordToOafMapper { public class OafToOafMapper extends AbstractMdRecordToOafMapper {
@ -52,7 +44,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
@Override @Override
protected Qualifier prepareLanguages(final Document doc) { protected Qualifier prepareLanguages(final Document doc) {
return prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages"); return prepareQualifier(doc, "//dc:language", DNET_LANGUAGES, DNET_LANGUAGES);
} }
@Override @Override
@ -96,38 +88,43 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
final DataInfo info, final DataInfo info,
final KeyValue collectedfrom, final KeyValue collectedfrom,
final KeyValue hostedby) { final KeyValue hostedby) {
final List<Instance> res = new ArrayList<>();
for (final Object o : doc.selectNodes("//dc:identifier")) { final Instance instance = new Instance();
final String url = ((Node) o).getText().trim(); instance
if (url.startsWith("http")) { .setInstancetype(
final Instance instance = new Instance(); prepareQualifier(
instance.setUrl(Arrays.asList(url)); doc,
instance "//dr:CobjCategory",
.setInstancetype( DNET_PUBLICATION_RESOURCE,
prepareQualifier( DNET_PUBLICATION_RESOURCE));
doc, instance.setCollectedfrom(collectedfrom);
"//dr:CobjCategory", instance.setHostedby(hostedby);
"dnet:publication_resource", instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
"dnet:publication_resource")); instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
instance.setCollectedfrom(collectedfrom); instance
instance.setHostedby(hostedby); .setAccessright(
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info)); prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation")); instance.setLicense(field(doc.valueOf("//oaf:license"), info));
instance instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
.setAccessright( instance
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes")); .setProcessingchargeamount(
instance.setLicense(field(doc.valueOf("//oaf:license"), info)); field(doc.valueOf("//oaf:processingchargeamount"), info));
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); instance
instance .setProcessingchargecurrency(
.setProcessingchargeamount( field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
field(doc.valueOf("//oaf:processingchargeamount"), info));
instance List<Node> nodes = Lists.newArrayList(doc.selectNodes("//dc:identifier"));
.setProcessingchargecurrency( instance
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info)); .setUrl(
res.add(instance); nodes
} .stream()
} .filter(n -> StringUtils.isNotBlank(n.getText()))
return res; .map(n -> n.getText().trim())
.filter(u -> u.startsWith("http"))
.distinct()
.collect(Collectors.toCollection(ArrayList::new)));
return Lists.newArrayList(instance);
} }
@Override @Override
@ -241,27 +238,16 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
final String otherId = createOpenaireId(50, originalId, false); final String otherId = createOpenaireId(50, originalId, false);
final Relation r1 = new Relation(); res
r1.setRelType("resultResult"); .add(
r1.setSubRelType("publicationDataset"); getRelation(
r1.setRelClass("isRelatedTo"); docId, otherId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
r1.setSource(docId); lastUpdateTimestamp));
r1.setTarget(otherId); res
r1.setCollectedfrom(Arrays.asList(collectedFrom)); .add(
r1.setDataInfo(info); getRelation(
r1.setLastupdatetimestamp(lastUpdateTimestamp); otherId, docId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
res.add(r1); lastUpdateTimestamp));
final Relation r2 = new Relation();
r2.setRelType("resultResult");
r2.setSubRelType("publicationDataset");
r2.setRelClass("isRelatedTo");
r2.setSource(otherId);
r2.setTarget(docId);
r2.setCollectedfrom(Arrays.asList(collectedFrom));
r2.setDataInfo(info);
r2.setLastupdatetimestamp(lastUpdateTimestamp);
res.add(r2);
} }
} }
return res; return res;

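Reading aid (hypothetical identifier values, not part of the commit): the rewritten prepareInstances above builds a single Instance and collects its URLs with a stream that keeps only non-blank http(s) identifiers, trims them, and drops duplicates.

// Sketch only: the filtering and deduplication applied to //dc:identifier values.
List<String> identifiers = Arrays.asList(
	"http://hdl.handle.net/123/456", " http://hdl.handle.net/123/456", "urn:nbn:de:0001-2345", "");
List<String> urls = identifiers
	.stream()
	.filter(StringUtils::isNotBlank)
	.map(String::trim)
	.filter(u -> u.startsWith("http"))
	.distinct()
	.collect(Collectors.toCollection(ArrayList::new));
// urls == [http://hdl.handle.net/123/456]
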
View File

@ -4,16 +4,15 @@ package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document; import org.dom4j.Document;
import org.dom4j.Node; import org.dom4j.Node;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field; import eu.dnetlib.dhp.schema.oaf.Field;
@ -27,6 +26,8 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
public class OdfToOafMapper extends AbstractMdRecordToOafMapper { public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
public static final String HTTP_DX_DOI_PREFIX = "http://dx.doi.org/";
public OdfToOafMapper(final Map<String, String> code2name) { public OdfToOafMapper(final Map<String, String> code2name) {
super(code2name); super(code2name);
} }
@ -62,7 +63,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
structuredProperty( structuredProperty(
((Node) o).getText(), ((Node) o).getText(),
prepareQualifier( prepareQualifier(
(Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"), (Node) o, "./@nameIdentifierScheme", DNET_PID_TYPES, DNET_PID_TYPES),
info)); info));
} }
return res; return res;
@ -76,18 +77,19 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
final KeyValue hostedby) { final KeyValue hostedby) {
final Instance instance = new Instance(); final Instance instance = new Instance();
final Set<String> url = new HashSet<>();
instance.setUrl(new ArrayList<>()); instance.setUrl(new ArrayList<>());
instance instance
.setInstancetype( .setInstancetype(
prepareQualifier( prepareQualifier(
doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource")); doc, "//dr:CobjCategory", DNET_PUBLICATION_RESOURCE, DNET_PUBLICATION_RESOURCE));
instance.setCollectedfrom(collectedfrom); instance.setCollectedfrom(collectedfrom);
instance.setHostedby(hostedby); instance.setHostedby(hostedby);
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info)); instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation")); instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
instance instance
.setAccessright( .setAccessright(
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes")); prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
instance.setLicense(field(doc.valueOf("//oaf:license"), info)); instance.setLicense(field(doc.valueOf("//oaf:license"), info));
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info)); instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info));
@ -96,17 +98,18 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info)); field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) { for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) {
instance.getUrl().add(((Node) o).getText().trim()); url.add(((Node) o).getText().trim());
} }
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) { for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) {
instance.getUrl().add(((Node) o).getText().trim()); url.add(((Node) o).getText().trim());
} }
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) { for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim()); url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
} }
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) { for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) {
instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim()); url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
} }
instance.getUrl().addAll(url);
return Arrays.asList(instance); return Arrays.asList(instance);
} }
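Collecting the URLs into a Set before copying them onto the instance de-duplicates values that occur under more than one datacite element, for instance when the same DOI appears both as an identifier and as an alternateIdentifier. A minimal sketch of the pattern with made-up sample values (note that HashSet does not preserve insertion order):

// Sketch of the de-duplication pattern used above; the DOI value is invented for illustration.
final Set<String> url = new HashSet<>();
url.add("http://dx.doi.org/10.1234/example"); // from datacite:identifier[@identifierType='DOI']
url.add("http://dx.doi.org/10.1234/example"); // same DOI via alternateIdentifier: ignored by the Set
instance.getUrl().addAll(url); // the instance ends up with a single URL entry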
@ -131,8 +134,8 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
((Node) o).getText(), ((Node) o).getText(),
"UNKNOWN", "UNKNOWN",
"UNKNOWN", "UNKNOWN",
"dnet:dataCite_date", DNET_DATA_CITE_DATE,
"dnet:dataCite_date", DNET_DATA_CITE_DATE,
info)); info));
} }
} }
@ -171,7 +174,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
@Override @Override
protected Qualifier prepareLanguages(final Document doc) { protected Qualifier prepareLanguages(final Document doc) {
return prepareQualifier(doc, "//datacite:language", "dnet:languages", "dnet:languages"); return prepareQualifier(doc, "//datacite:language", DNET_LANGUAGES, DNET_LANGUAGES);
} }
@Override @Override
@ -292,36 +295,29 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
final String otherId = createOpenaireId(50, originalId, false); final String otherId = createOpenaireId(50, originalId, false);
final String type = ((Node) o).valueOf("@relationType"); final String type = ((Node) o).valueOf("@relationType");
if (type.equals("IsSupplementTo")) { if (type.equalsIgnoreCase("IsSupplementTo")) {
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, docId, otherId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENT_TO, collectedFrom, info,
info, lastUpdateTimestamp));
lastUpdateTimestamp,
docId,
otherId,
"supplement",
"isSupplementTo"));
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, otherId, docId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENTED_BY, collectedFrom, info,
info, lastUpdateTimestamp));
lastUpdateTimestamp,
otherId,
docId,
"supplement",
"isSupplementedBy"));
} else if (type.equals("IsPartOf")) { } else if (type.equals("IsPartOf")) {
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf")); docId, otherId, RESULT_RESULT, PART, IS_PART_OF, collectedFrom, info,
lastUpdateTimestamp));
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts")); otherId, docId, RESULT_RESULT, PART, HAS_PARTS, collectedFrom, info,
lastUpdateTimestamp));
} else { } else {
} }
} }
@ -329,32 +325,12 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
return res; return res;
} }
private Relation prepareOtherResultRel(
final KeyValue collectedFrom,
final DataInfo info,
final long lastUpdateTimestamp,
final String source,
final String target,
final String subRelType,
final String relClass) {
final Relation r = new Relation();
r.setRelType("resultResult");
r.setSubRelType(subRelType);
r.setRelClass(relClass);
r.setSource(source);
r.setTarget(target);
r.setCollectedfrom(Arrays.asList(collectedFrom));
r.setDataInfo(info);
r.setLastupdatetimestamp(lastUpdateTimestamp);
return r;
}
@Override @Override
protected Qualifier prepareResourceType(final Document doc, final DataInfo info) { protected Qualifier prepareResourceType(final Document doc, final DataInfo info) {
return prepareQualifier( return prepareQualifier(
doc, doc,
"//*[local-name() = 'resource']//*[local-name() = 'resourceType']", "//*[local-name() = 'resource']//*[local-name() = 'resourceType']",
"dnet:dataCite_resource", DNET_DATA_CITE_RESOURCE,
"dnet:dataCite_resource"); DNET_DATA_CITE_RESOURCE);
} }
} }

View File

@ -1,27 +0,0 @@
package eu.dnetlib.dhp.oa.graph.raw.common;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
public class MigrationConstants {
public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = qualifier(
"publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier DATASET_RESULTTYPE_QUALIFIER = qualifier(
"dataset", "dataset",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier(
"software", "software",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier(
"other", "other",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
"dnet:provenanceActions", "dnet:provenanceActions");
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
"dnet:provenanceActions", "dnet:provenanceActions");
}
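The qualifiers hard-coded in this removed class are not lost: identical values can be produced wherever they are needed through the same OafMapperUtils.qualifier(...) factory the class itself used. A minimal sketch, reusing the literal vocabulary names from the deleted code (where the project actually keeps these constants after this commit may differ):

// Minimal sketch only; the values are copied from the deleted class above.
final Qualifier publicationResulttype = qualifier(
	"publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
final Qualifier repositoryProvenance = qualifier(
	"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
	"dnet:provenanceActions", "dnet:provenanceActions");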

View File

@ -115,11 +115,11 @@
<delete path="${contentPath}/db_claims"/> <delete path="${contentPath}/db_claims"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${contentPath}/db_claims</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>-a</arg><arg>claims</arg> <arg>--action</arg><arg>claims</arg>
</java> </java>
<ok to="ImportODF_claims"/> <ok to="ImportODF_claims"/>
<error to="Kill"/> <error to="Kill"/>
@ -165,10 +165,10 @@
<delete path="${contentPath}/db_records"/> <delete path="${contentPath}/db_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${contentPath}/db_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</java> </java>
<ok to="ImportODF"/> <ok to="ImportODF"/>
<error to="Kill"/> <error to="Kill"/>
@ -180,12 +180,12 @@
<delete path="${contentPath}/odf_records"/> <delete path="${contentPath}/odf_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/odf_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg> <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg> <arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg> <arg>--mdFormat</arg><arg>ODF</arg>
<arg>-l</arg><arg>store</arg> <arg>--mdLayout</arg><arg>store</arg>
<arg>-i</arg><arg>cleaned</arg> <arg>--mdInterpretation</arg><arg>cleaned</arg>
</java> </java>
<ok to="ImportOAF"/> <ok to="ImportOAF"/>
<error to="Kill"/> <error to="Kill"/>
@ -197,12 +197,12 @@
<delete path="${contentPath}/oaf_records"/> <delete path="${contentPath}/oaf_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/oaf_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg> <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg> <arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg> <arg>--mdFormat</arg><arg>OAF</arg>
<arg>-l</arg><arg>store</arg> <arg>--mdLayout</arg><arg>store</arg>
<arg>-i</arg><arg>cleaned</arg> <arg>--mdInterpretation</arg><arg>cleaned</arg>
</java> </java>
<ok to="wait_import"/> <ok to="wait_import"/>
<error to="Kill"/> <error to="Kill"/>
@ -231,11 +231,11 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg> <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
<arg>-t</arg><arg>${workingDir}/entities_claim</arg> <arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</spark> </spark>
<ok to="GenerateGraph_claims"/> <ok to="GenerateGraph_claims"/>
<error to="Kill"/> <error to="Kill"/>
@ -257,8 +257,8 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${workingDir}/entities_claim</arg> <arg>--sourcePath</arg><arg>${workingDir}/entities_claim</arg>
<arg>-g</arg><arg>${workingDir}/graph_claims</arg> <arg>--graphRawPath</arg><arg>${workingDir}/graph_claims</arg>
</spark> </spark>
<ok to="wait_graphs"/> <ok to="wait_graphs"/>
<error to="Kill"/> <error to="Kill"/>
@ -280,11 +280,11 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg> <arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
<arg>-t</arg><arg>${workingDir}/entities</arg> <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</spark> </spark>
<ok to="GenerateGraph"/> <ok to="GenerateGraph"/>
<error to="Kill"/> <error to="Kill"/>
@ -307,8 +307,8 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=7680 --conf spark.sql.shuffle.partitions=7680
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${workingDir}/entities</arg> <arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
<arg>-g</arg><arg>${workingDir}/graph_raw</arg> <arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
</spark> </spark>
<ok to="wait_graphs"/> <ok to="wait_graphs"/>
<error to="Kill"/> <error to="Kill"/>

View File

@ -1,8 +1,8 @@
<workflow-app name="import db entities (step 1)" xmlns="uri:oozie:workflow:0.5"> <workflow-app name="import DB entities" xmlns="uri:oozie:workflow:0.5">
<parameters> <parameters>
<property> <property>
<name>migrationPathStep1</name> <name>contentPath</name>
<description>the base path to store hdfs file</description> <description>path location to store (or reuse) content from the aggregator</description>
</property> </property>
<property> <property>
<name>postgresURL</name> <name>postgresURL</name>
@ -16,6 +16,7 @@
<name>postgresPassword</name> <name>postgresPassword</name>
<description>the password postgres</description> <description>the password postgres</description>
</property> </property>
<property> <property>
<name>sparkDriverMemory</name> <name>sparkDriverMemory</name>
<description>memory for driver process</description> <description>memory for driver process</description>
@ -28,31 +29,81 @@
<name>sparkExecutorCores</name> <name>sparkExecutorCores</name>
<description>number of cores used by single executor</description> <description>number of cores used by single executor</description>
</property> </property>
<property>
<name>oozieActionShareLibForSpark2</name>
<description>oozie action sharelib for spark 2.*</description>
</property>
<property>
<name>spark2ExtraListeners</name>
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
<description>spark 2.* extra listeners classname</description>
</property>
<property>
<name>spark2SqlQueryExecutionListeners</name>
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
<description>spark 2.* sql query execution listeners classname</description>
</property>
<property>
<name>spark2YarnHistoryServerAddress</name>
<description>spark 2.* yarn history server address</description>
</property>
<property>
<name>spark2EventLogDir</name>
<description>spark 2.* event log dir location</description>
</property>
</parameters> </parameters>
<start to="ResetWorkingPath"/> <global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>${oozieActionShareLibForSpark2}</value>
</property>
</configuration>
</global>
<start to="ImportDB"/>
<kill name="Kill"> <kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill> </kill>
<action name="ResetWorkingPath"> <action name="ImportDB">
<fs> <java>
<delete path='${migrationPathStep1}/db_records'/> <prepare>
</fs> <delete path="${contentPath}/db_records"/>
<ok to="ImportDB"/> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</java>
<ok to="ImportDB_claims"/>
<error to="Kill"/> <error to="Kill"/>
</action> </action>
<action name="ImportDB"> <action name="ImportDB_claims">
<java> <java>
<job-tracker>${jobTracker}</job-tracker> <prepare>
<name-node>${nameNode}</name-node> <delete path="${contentPath}/db_claims"/>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class> </prepare>
<arg>-p</arg><arg>${migrationPathStep1}/db_records</arg> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>--action</arg><arg>claims</arg>
</java> </java>
<ok to="End"/> <ok to="End"/>
<error to="Kill"/> <error to="Kill"/>

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -113,7 +113,7 @@ public class XmlIndexingJob {
}); });
} }
private static String toIndexRecord(Transformer tr, final String record) { protected static String toIndexRecord(Transformer tr, final String record) {
final StreamResult res = new StreamResult(new StringWriter()); final StreamResult res = new StreamResult(new StringWriter());
try { try {
tr.transform(new StreamSource(new StringReader(record)), res); tr.transform(new StreamSource(new StringReader(record)), res);
@ -135,7 +135,7 @@ public class XmlIndexingJob {
* @throws IOException could happen * @throws IOException could happen
* @throws TransformerException could happen * @throws TransformerException could happen
*/ */
private static String getLayoutTransformer(String format, String fields, String xslt) protected static String getLayoutTransformer(String format, String fields, String xslt)
throws TransformerException { throws TransformerException {
final Transformer layoutTransformer = SaxonTransformerFactory.newInstance(xslt); final Transformer layoutTransformer = SaxonTransformerFactory.newInstance(xslt);

View File

@ -9,10 +9,7 @@ import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.io.StringReader; import java.io.StringReader;
import java.io.StringWriter; import java.io.StringWriter;
import java.util.ArrayList; import java.util.*;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import javax.xml.transform.*; import javax.xml.transform.*;
@ -47,7 +44,7 @@ import eu.dnetlib.dhp.schema.oaf.Result;
public class XmlRecordFactory implements Serializable { public class XmlRecordFactory implements Serializable {
public static final String REL_SUBTYPE_DEDUP = "dedup"; private static final String REL_SUBTYPE_DEDUP = "dedup";
private final Map<String, LongAccumulator> accumulators; private final Map<String, LongAccumulator> accumulators;
private final Set<String> specialDatasourceTypes; private final Set<String> specialDatasourceTypes;
@ -100,8 +97,8 @@ public class XmlRecordFactory implements Serializable {
final List<String> relations = je final List<String> relations = je
.getLinks() .getLinks()
.stream() .stream()
.filter(t -> !REL_SUBTYPE_DEDUP.equalsIgnoreCase(t.getRelation().getSubRelType())) .filter(link -> !isDuplicate(link))
.map(link -> mapRelation(link, templateFactory, contexts)) .map(link -> mapRelation(contexts, templateFactory, type, link))
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
final String mainType = ModelSupport.getMainType(type); final String mainType = ModelSupport.getMainType(type);
@ -936,7 +933,7 @@ public class XmlRecordFactory implements Serializable {
metadata.add(XmlSerializationUtils.mapQualifier("datasourcetypeui", dsType)); metadata.add(XmlSerializationUtils.mapQualifier("datasourcetypeui", dsType));
} }
private String mapRelation(Tuple2 link, TemplateFactory templateFactory, Set<String> contexts) { private List<String> mapFields(Tuple2 link, Set<String> contexts) {
final Relation rel = link.getRelation(); final Relation rel = link.getRelation();
final RelatedEntity re = link.getRelatedEntity(); final RelatedEntity re = link.getRelatedEntity();
final String targetType = link.getRelatedEntity().getType(); final String targetType = link.getRelatedEntity().getType();
@ -1040,38 +1037,47 @@ public class XmlRecordFactory implements Serializable {
default: default:
throw new IllegalArgumentException("invalid target type: " + targetType); throw new IllegalArgumentException("invalid target type: " + targetType);
} }
final DataInfo info = rel.getDataInfo();
final String scheme = ModelSupport.getScheme(re.getType(), targetType);
if (StringUtils.isBlank(scheme)) {
throw new IllegalArgumentException(
String.format("missing scheme for: <%s - %s>", re.getType(), targetType));
}
final String accumulatorName = getRelDescriptor(rel.getRelType(), rel.getSubRelType(), rel.getRelClass()); final String accumulatorName = getRelDescriptor(rel.getRelType(), rel.getSubRelType(), rel.getRelClass());
if (accumulators.containsKey(accumulatorName)) { if (accumulators.containsKey(accumulatorName)) {
accumulators.get(accumulatorName).add(1); accumulators.get(accumulatorName).add(1);
} }
return metadata;
}
private String mapRelation(Set<String> contexts, TemplateFactory templateFactory, EntityType type, Tuple2 link) {
final Relation rel = link.getRelation();
final String targetType = link.getRelatedEntity().getType();
final String scheme = ModelSupport.getScheme(type.toString(), targetType);
if (StringUtils.isBlank(scheme)) {
throw new IllegalArgumentException(
String.format("missing scheme for: <%s - %s>", type.toString(), targetType));
}
final HashSet<String> fields = Sets.newHashSet(mapFields(link, contexts));
return templateFactory return templateFactory
.getRel( .getRel(
targetType, rel.getTarget(), Sets.newHashSet(metadata), rel.getRelClass(), scheme, info); targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo());
} }
private List<String> listChildren( private List<String> listChildren(
final OafEntity entity, JoinedEntity je, TemplateFactory templateFactory) { final OafEntity entity, JoinedEntity je, TemplateFactory templateFactory) {
final List<String> children = Lists.newArrayList();
EntityType entityType = EntityType.valueOf(je.getEntity().getType()); EntityType entityType = EntityType.valueOf(je.getEntity().getType());
children
	.addAll(
		je
			.getLinks()
			.stream()
			.filter(link -> REL_SUBTYPE_DEDUP.equalsIgnoreCase(link.getRelation().getSubRelType()))
			.map(link -> mapRelation(link, templateFactory, null))
			.collect(Collectors.toCollection(ArrayList::new)));
List<String> children = je
	.getLinks()
	.stream()
	.filter(link -> isDuplicate(link))
	.map(link -> {
		final String targetType = link.getRelatedEntity().getType();
		final String name = ModelSupport.getMainType(EntityType.valueOf(targetType));
		final HashSet<String> fields = Sets.newHashSet(mapFields(link, null));
		return templateFactory
			.getChild(name, link.getRelatedEntity().getId(), Lists.newArrayList(fields));
	})
	.collect(Collectors.toCollection(ArrayList::new));
if (MainEntityType.result.toString().equals(ModelSupport.getMainType(entityType))) { if (MainEntityType.result.toString().equals(ModelSupport.getMainType(entityType))) {
final List<Instance> instances = ((Result) entity).getInstance(); final List<Instance> instances = ((Result) entity).getInstance();
@ -1178,6 +1184,10 @@ public class XmlRecordFactory implements Serializable {
return children; return children;
} }
private boolean isDuplicate(Tuple2 link) {
return REL_SUBTYPE_DEDUP.equalsIgnoreCase(link.getRelation().getSubRelType());
}
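The new isDuplicate(...) predicate is what ties the two earlier hunks together: the relations list keeps the links for which it is false, while listChildren(...) keeps the ones for which it is true. As an illustration only (not code from this commit), the same split could be expressed as a single partition of the links:

// Illustrative sketch, not part of the commit: partitioning the links once with the same
// predicate is equivalent to the two opposite filter(...) calls used above.
private Map<Boolean, List<Tuple2>> splitLinks(final JoinedEntity je) {
	return je
		.getLinks()
		.stream()
		.collect(Collectors.partitioningBy(this::isDuplicate));
	// true -> dedup links, rendered as children; false -> regular links, rendered as rel elements
}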
private List<String> listExtraInfo(OafEntity entity) { private List<String> listExtraInfo(OafEntity entity) {
final List<ExtraInfo> extraInfo = entity.getExtraInfo(); final List<ExtraInfo> extraInfo = entity.getExtraInfo();
return extraInfo != null return extraInfo != null

View File

@ -405,6 +405,9 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.speculation=false
--conf spark.hadoop.mapreduce.map.speculative=false
--conf spark.hadoop.mapreduce.reduce.speculative=false
</spark-opts> </spark-opts>
<arg>--inputPath</arg><arg>${workingDir}/xml</arg> <arg>--inputPath</arg><arg>${workingDir}/xml</arg>
<arg>--isLookupUrl</arg> <arg>${isLookupUrl}</arg> <arg>--isLookupUrl</arg> <arg>${isLookupUrl}</arg>

View File

@ -1,3 +1,3 @@
<name$if(hasId)$ objidentifier="$id$"$else$$endif>> <$name$$if(hasId)$ objidentifier="$id$"$else$$endif$>
$metadata:{ it | $it$ }$ $metadata:{$it$}$
</name> </$name$>
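With this fix the template emits an element named by its $name$ parameter instead of a literal, malformed <name ...>> tag: rendered with name "publication" and an object identifier, it now produces something like <publication objidentifier="..."> ... </publication>. This is presumably one of the templates driven by the TemplateFactory calls seen in the XmlRecordFactory changes above.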

View File

@ -1,42 +0,0 @@
package eu.dnetlib.dhp.oa.provision;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.junit.jupiter.api.BeforeEach;
public class GraphJoinerTest {
private final ClassLoader cl = getClass().getClassLoader();
private Path workingDir;
private Path inputDir;
private Path outputDir;
@BeforeEach
public void before() throws IOException {
workingDir = Files.createTempDirectory("promote_action_set");
inputDir = workingDir.resolve("input");
outputDir = workingDir.resolve("output");
}
private static void copyFiles(Path source, Path target) throws IOException {
Files
.list(source)
.forEach(
f -> {
try {
if (Files.isDirectory(f)) {
Path subTarget = Files.createDirectories(target.resolve(f.getFileName()));
copyFiles(f, subTarget);
} else {
Files.copy(f, target.resolve(f.getFileName()));
}
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
});
}
}

View File

@ -0,0 +1,39 @@
package eu.dnetlib.dhp.oa.provision;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import eu.dnetlib.dhp.utils.saxon.SaxonTransformerFactory;
public class IndexRecordTransformerTest {
@Test
public void testTrasformRecord() throws IOException, TransformerException {
String fields = IOUtils.toString(getClass().getResourceAsStream("fields.xml"));
String record = IOUtils.toString(getClass().getResourceAsStream("record.xml"));
String xslt = IOUtils.toString(getClass().getResourceAsStream("layoutToRecordTransformer.xsl"));
String transformer = XmlIndexingJob.getLayoutTransformer("DMF", fields, xslt);
Transformer tr = SaxonTransformerFactory.newInstance(transformer);
String a = XmlIndexingJob.toIndexRecord(tr, record);
System.out.println(a);
}
}

View File

@ -0,0 +1,166 @@
<LAYOUT name="index">
<FIELDS>
<FIELD indexable="false" name="oafentity" result="true" stat="false" tokenizable="false" xpath="//*[local-name() = 'entity']"/>
<FIELD indexable="true" name="oaftype" result="false" stat="false" tokenizable="false" value="local-name(//*[local-name()='entity']/*[local-name() != 'extraInfo'])"/>
<FIELD indexable="true" name="objIdentifier" result="false" stat="false" tokenizable="false" xpath="//header/dri:objIdentifier"/><!-- DATASOURCE FIELDS -->
<FIELD indexable="true" name="datasourceofficialname" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/officialname"/>
<FIELD indexable="true" name="datasourceenglishname" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/englishname"/>
<FIELD indexable="true" name="datasourceoddescription" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/oddescription"/>
<FIELD indexable="true" name="datasourceodsubjects" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/odsubjects"/>
<FIELD indexable="true" name="datasourceodlanguages" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/odlanguages"/>
<FIELD indexable="true" name="datasourceodcontenttypes" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/odcontenttypes"/>
<FIELD indexable="true" multivalued="false" name="datasourcetypename" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/datasourcetype/@classname"/>
<FIELD indexable="true" multivalued="false" name="datasourcetypeuiid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/datasourcetypeui/@classid"/>
<FIELD indexable="true" multivalued="false" name="datasourcetypeuiname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/datasourcetypeui/@classname"/>
<FIELD indexable="true" multivalued="false" name="datasourcecompatibilityid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/openairecompatibility/@classid"/>
<FIELD indexable="true" multivalued="false" name="datasourcecompatibilityname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/openairecompatibility/@classname"/>
<FIELD indexable="true" multivalued="true" name="datasourcesubject" result="false" stat="false" type="ngramtext" xpath="//*[local-name()='entity']/*[local-name()='datasource']/subjects"/>
<FIELD indexable="true" name="versioning" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='datasource']/versioning"/><!-- ORGANIZATION FIELDS -->
<FIELD indexable="true" name="organizationlegalshortname" result="false" stat="false" type="ngramtext" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='organization']//legalshortname)"/>
<FIELD indexable="true" name="organizationlegalname" result="false" stat="false" type="ngramtext" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='organization']//legalname)"/>
<FIELD indexable="true" name="organizationalternativenames" result="false" stat="false" type="ngramtext" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='organization']//alternativeNames)"/>
<FIELD indexable="true" name="organizationeclegalbody" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/eclegalbody"/>
<FIELD indexable="true" name="organizationeclegalperson" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/eclegalperson"/>
<FIELD indexable="true" name="organizationecnonprofit" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecnonprofit"/>
<FIELD indexable="true" name="organizationecresearchorganization" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecresearchorganization"/>
<FIELD indexable="true" name="organizationecinternationalorganizationeurinterests" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecinternationalorganizationeurinterests"/>
<FIELD indexable="true" name="organizationecinternationalorganization" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecinternationalorganization"/>
<FIELD indexable="true" name="organizationecenterprise" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecenterprise"/>
<FIELD indexable="true" name="organizationecsmevalidated" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecsmevalidated"/>
<FIELD indexable="true" name="organizationecnutscode" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/ecnutscode"/>
<FIELD indexable="true" multivalued="false" name="organizationcountryname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='organization']/country/@classname"/><!-- PROJECT FIELDS -->
<FIELD indexable="true" name="projectcode" result="false" stat="false" type="ngramtext" xpath="//*[local-name()='entity']/*[local-name()='project']/code"/>
<FIELD indexable="true" name="projectcode_nt" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/code"/>
<FIELD indexable="true" name="projectacronym" result="false" stat="false" type="ngramtext" xpath="//*[local-name()='entity']/*[local-name()='project']/acronym"/>
<FIELD indexable="true" name="projecttitle" result="false" stat="false" type="ngramtext" xpath="//*[local-name()='entity']/*[local-name()='project']/title"/>
<FIELD indexable="true" multivalued="false" name="projectstartdate" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='project']/startdate"/>
<FIELD indexable="true" multivalued="false" name="projectstartyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='project']/startdate)"/>
<FIELD indexable="true" multivalued="false" name="projectenddate" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='project']/enddate"/>
<FIELD indexable="true" multivalued="false" name="projectendyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='project']/enddate)"/>
<FIELD indexable="true" multivalued="false" name="projectcallidentifier" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/callidentifier"/>
<FIELD indexable="true" name="projectkeywords" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='project']/keywords"/>
<FIELD indexable="true" multivalued="false" name="projectduration" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/duration"/>
<FIELD indexable="true" multivalued="false" name="projectecsc39" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='project']/ecsc39)"/>
<FIELD indexable="true" multivalued="false" name="projectoamandatepublications" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/oamandatepublications"/>
<FIELD indexable="true" multivalued="false" name="projectecarticle29_3" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/ecarticle29_3"/>
<FIELD indexable="true" name="projectsubject" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/subjects"/>
<FIELD indexable="true" multivalued="false" name="projectcontracttypename" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/contracttype/@classname"/>
<FIELD indexable="true" name="fundinglevel0_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_0/id"/>
<FIELD indexable="true" name="fundinglevel0_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_0/name"/>
<FIELD indexable="true" name="fundinglevel0_description" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_0/description"/>
<FIELD indexable="true" name="fundinglevel1_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_1/id"/>
<FIELD indexable="true" name="fundinglevel1_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_1/name"/>
<FIELD indexable="true" name="fundinglevel1_description" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_1/description"/>
<FIELD indexable="true" name="fundinglevel2_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_2/id"/>
<FIELD indexable="true" name="fundinglevel2_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_2/name"/>
<FIELD indexable="true" name="fundinglevel2_description" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree//funding_level_2/description"/><!-- PROJECTS' FUNDER FIELDS: indexable only with the new funding path/context handling -->
<FIELD indexable="true" name="funder" result="false" stat="false" tokenizable="false" value="concat(./id/text(), '||', ./name/text(), '||', ./shortname/text())" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder"/>
<FIELD indexable="true" name="fundershortname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/shortname"/>
<FIELD indexable="true" name="funderid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/id"/>
<FIELD indexable="true" name="fundername" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/name"/>
<FIELD indexable="true" name="funderoriginalname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/originalname"/>
<FIELD indexable="true" name="funderjurisdiction" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/jurisdiction"/><!-- RESULT FIELDS -->
<FIELD indexable="true" name="resulttitle" result="false" stat="false" xpath="//*[local-name() = 'entity']/*[local-name() ='result']/title | //*[local-name()='entity']/*[local-name()='result']/children/result/title"/>
<FIELD indexable="true" name="resultsubject" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject)"/>
<FIELD indexable="true" name="resultsubjectclass" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultembargoenddate" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/embargoenddate"/>
<FIELD indexable="true" multivalued="false" name="resultembargoendyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/embargoenddate)"/>
<FIELD indexable="true" multivalued="false" name="resulttypeid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/resulttype/@classid"/>
<FIELD indexable="true" multivalued="false" name="resulttypename" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/resulttype/@classname"/>
<FIELD indexable="true" multivalued="false" name="resultlanguagename" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/language/@classname"/>
<FIELD indexable="true" name="resultpublisher" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='result']/*[local-name()='publisher']"/>
<FIELD indexable="true" name="resultdescription" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='result']//*[local-name()='description']"/>
<FIELD indexable="true" name="resultlicense" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/license"/>
<FIELD indexable="true" name="resultaccessright" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/accessright/@classname"/>
<FIELD indexable="true" multivalued="false" name="resultbestaccessright" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/bestaccessright/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultdateofacceptance" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/dateofacceptance"/>
<FIELD indexable="true" multivalued="false" name="resultacceptanceyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/dateofacceptance)"/>
<FIELD indexable="true" multivalued="true" name="resultauthor" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='result']/creator"/>
<FIELD indexable="true" multivalued="true" name="resultauthor_nt" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator"/>
<FIELD indexable="true" multivalued="true" name="authorid" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']"/>
<FIELD indexable="true" multivalued="true" name="authoridtype" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']/local-name()"/>
<FIELD indexable="true" name="resulthostingdatasource" result="false" stat="false" tokenizable="false" value="distinct-values(concat(./@id, '||', ./@name))" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']"/>
<FIELD indexable="true" name="resulthostingdatasourceid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@id)"/>
<FIELD indexable="true" name="resulthostingdatasourcename" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@name)"/>
<FIELD indexable="true" name="instancetypename" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='instancetype']/@classname)"/>
<FIELD indexable="true" name="resultdupid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*//children/result/@objidentifier"/>
<FIELD indexable="true" name="organizationdupid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*//children/organization/@objidentifier"/>
<FIELD indexable="true" name="externalrefsite" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//children/externalreference/sitename)"/>
<FIELD indexable="true" name="externalreflabel" result="false" stat="false" tokenizable="true" xpath="distinct-values(//*[local-name()='entity']/*//children/externalreference/label)"/>
<FIELD indexable="true" name="externalrefclass" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//children/externalreference/qualifier/@classid)"/>
<FIELD indexable="true" name="externalrefid" result="false" stat="false" tokenizable="false" xpath="(//*[local-name()='entity']/*//children/externalreference/refidentifier)"/>
<FIELD indexable="true" name="resultidentifier" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/webresource/*[local-name()='url'])"/>
<FIELD indexable="true" name="resultsource" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/source)"/><!-- REL FIELDS -->
<FIELD indexable="true" name="reldatasourcecompatibilityid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='datasource']/openairecompatibility/@classid)"/>
<FIELD indexable="true" name="relproject" result="false" stat="false" tokenizable="false" value="distinct-values(concat(./text(), '||', dnet:pickFirst(../acronym/text(), ../title/text())))" xpath="//*[local-name()='entity']/*//rel/to[@type='project']"/>
<FIELD indexable="true" name="relprojectid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/to[@type='project'])"/>
<FIELD indexable="true" name="relprojectcode" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='project']/code)"/>
<FIELD indexable="true" name="relprojectname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='project']/acronym)"/>
<FIELD indexable="true" name="relprojecttitle" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='project']/title)"/>
<FIELD indexable="true" name="relcontracttypeid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='project']/contracttype/@classid)"/>
<FIELD indexable="true" name="relcontracttypename" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='project']/contracttype/@classname)"/>
<FIELD indexable="true" name="relorganizationcountryid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='organization']/country/@classid)"/>
<FIELD indexable="true" name="relorganizationcountryname" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='organization']/country/@classname)"/>
<FIELD indexable="true" name="relorganizationid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/to[@type='organization'])"/>
<FIELD indexable="true" name="relorganizationname" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='organization']/legalname)"/>
<FIELD indexable="true" name="relorganizationshortname" result="false" stat="false" xpath="distinct-values(//*[local-name()='entity']/*//rel[./to/@type='organization']/legalshortname)"/>
<FIELD indexable="true" name="relresultid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/to[@type='result'])"/>
<FIELD indexable="true" name="relresulttype" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/resulttype/@classid)"/>
<FIELD indexable="true" name="relclass" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/to/@class)"/>
<FIELD indexable="true" name="relfundinglevel0_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_0"/>
<FIELD indexable="true" name="relfundinglevel0_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_0/@name/string()"/>
<FIELD indexable="true" name="relfundinglevel1_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_1"/>
<FIELD indexable="true" name="relfundinglevel1_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_1/@name/string()"/>
<FIELD indexable="true" name="relfundinglevel2_id" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_2"/>
<FIELD indexable="true" name="relfundinglevel2_name" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//rel/funding/funding_level_2/@name/string()"/>
<FIELD indexable="true" name="relinferred" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/@inferred)"/>
<FIELD indexable="true" name="reltrust" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/@trust)"/>
<FIELD indexable="true" name="relinferenceprovenance" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/@inferenceprovenance)"/>
<FIELD indexable="true" name="relprovenanceactionclassid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/@provenanceaction)"/><!-- PROJECTS' FUNDER FIELDS: indexable only with the new funding path/context handling -->
<FIELD indexable="true" name="relfunder" result="false" stat="false" tokenizable="false" value="distinct-values(concat(@id, '||', @name, '||', @shortname))" xpath="//*[local-name()='entity']//rel/funding/funder"/>
<FIELD indexable="true" name="relfunderid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/funding/funder/@id)"/>
<FIELD indexable="true" name="relfundershortname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/funding/funder/@shortname)"/>
<FIELD indexable="true" name="relfundername" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/funding/funder/@name)"/>
<FIELD indexable="true" name="relfunderjurisdiction" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']//rel/funding/funder/@jurisdiction)"/><!-- Collected from of the related entity. Available for result-result relationships -->
<FIELD indexable="true" name="relcollectedfromid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/collectedfrom/@id)"/>
<FIELD indexable="true" name="relcollectedfromname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*//rel/collectedfrom/@name)"/><!-- COMMON FIELDS -->
<FIELD indexable="true" multivalued="false" name="dateofcollection" result="false" stat="false" type="pdate" value="//header/*[local-name()='dateOfCollection']"/>
<FIELD indexable="true" name="collectedfrom" result="false" stat="false" tokenizable="false" value="distinct-values(concat(./@id, '||', ./@name))" xpath="//*[local-name()='entity']/*/*[local-name()='collectedfrom'] | //*[local-name()='entity']/*//*[local-name() = 'instance']/*[local-name()='collectedfrom']"/>
<FIELD indexable="true" name="collectedfromdatasourceid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*/*[local-name()='collectedfrom']/@id | //*[local-name()='entity']/*//*[local-name() = 'instance']/*[local-name()='collectedfrom']/@id)"/>
<FIELD indexable="true" name="collectedfromname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*/*[local-name()='collectedfrom']/@name | //*[local-name()='entity']/*//*[local-name() = 'instance']/*[local-name()='collectedfrom']/@name)"/>
<FIELD indexable="true" name="originalid" result="false" stat="false" tokenizable="false" type="string_ci" xpath="//*[local-name()='entity']/*/*[local-name()='originalId']"/>
<FIELD indexable="true" name="pid" result="false" stat="false" tokenizable="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/pid/text()"/>
<FIELD indexable="true" name="pidclassid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/pid/@classid)"/>
<FIELD indexable="true" name="pidclassname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/pid/@classname)"/>
<FIELD indexable="true" name="inferred" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//datainfo/inferred"/>
<FIELD indexable="true" name="deletedbyinference" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//datainfo/deletedbyinference"/>
<FIELD indexable="true" name="trust" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//datainfo/trust"/>
<FIELD indexable="true" name="inferenceprovenance" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//datainfo/inferenceprovenance"/>
<FIELD indexable="true" name="provenanceactionclassid" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']//datainfo/provenanceaction/@classid"/>
<FIELD indexable="true" name="contextid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/@id)"/>
<FIELD indexable="true" name="contexttype" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/@type)"/>
<FIELD indexable="true" name="contextname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/@label)"/><!-- Need special fields for community (research initiative) context in order to exclude funders from the context browse -->
<FIELD indexable="true" name="community" result="false" stat="false" tokenizable="false" value="distinct-values(concat(@id, '||', @label))" xpath="//*[local-name()='entity']/*[local-name()='result']/context[@type='community' or @type='ri']"/>
<FIELD indexable="true" name="communityname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context[@type='community' or @type='ri']/@label)"/>
<FIELD indexable="true" name="communityid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context[@type='community' or @type='ri']/@id)"/>
<FIELD indexable="true" name="categoryid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/category/@id)"/>
<FIELD indexable="true" name="categoryname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/category/@label)"/>
<FIELD indexable="true" name="conceptid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/category//concept/@id)"/>
<FIELD indexable="true" name="conceptname" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/context/category//concept/@label)"/><!-- new index field for country info from different xpaths for any type of entity -->
<FIELD indexable="true" name="country" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*/country/@classid | //*[local-name()='entity']/*//rel[./to/@type='organization']/country/@classid | //*[local-name()='entity']//funder/@jurisdiction)"/><!-- COUNTER FIELDS -->
<FIELD header="true" indexable="true" multivalued="false" name="counter_dedup" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_dedup/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_authorship" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_authorship/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_participation" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_participation/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_similarity" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_similarity/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_publicationdataset" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_publicationDataset/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_publicationdataset_claimed" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_publicationDataset_claimed/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_publicationdataset_collected" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_publicationDataset_collected/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_publicationdataset_inferred" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_publicationDataset_inferred/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_outcome" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_outcome/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_outcome_claimed" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_outcome_claimed/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_outcome_collected" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_outcome_collected/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_outcome_inferred" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_outcome_inferred/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_affiliation" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_affiliation/@value"/>
<FIELD header="true" indexable="true" multivalued="false" name="counter_doi" result="false" stored="true" type="pint" xpath="/record/result/*[local-name()='header']/*[local-name()='counters']/counter_doi/@value"/>
</FIELDS>
</LAYOUT>


@ -0,0 +1,94 @@
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:nxsl="http://www.w3.org/1999/XSL/TransformXX" version="3.0">
<xsl:output method="xml" omit-xml-declaration="yes"/>
<xsl:namespace-alias result-prefix="xsl" stylesheet-prefix="nxsl"/>
<xsl:param name="format"/>
<xsl:template match="/">
<xsl:apply-templates select="//LAYOUT"/>
</xsl:template>
<xsl:template match="LAYOUT">
<nxsl:stylesheet xmlns:dnet="http://www.d-net.research-infrastructures.eu/saxon-extension" xmlns:dri="http://www.driver-repository.eu/namespace/dri" exclude-result-prefixes="dnet" version="3.0">
<nxsl:output method="xml" omit-xml-declaration="yes" version="1.0"/>
<nxsl:variable name="format">
<xsl:value-of select="$format"/>
</nxsl:variable>
<nxsl:template match="/">
<indexRecord>
<indexRecordIdentifier>
<nxsl:value-of select="//dri:objIdentifier"/>
</indexRecordIdentifier>
<targetFields>
<nxsl:if test="count(//*[local-name()='metadata']/*) &gt; 0">
<xsl:apply-templates select="FIELDS/FIELD[@indexable='true']"/>
</nxsl:if>
</targetFields>
<dnetResult>
<nxsl:copy-of select="/*[local-name()='record']/*[local-name()='result']/*[local-name()='header']"/>
<metadata>
<xsl:apply-templates mode="result" select="FIELDS/FIELD"/>
</metadata>
</dnetResult>
</indexRecord>
</nxsl:template>
</nxsl:stylesheet>
</xsl:template>
<xsl:template match="FIELD[@indexable='true']">
<xsl:choose>
<xsl:when test="@constant">
<xsl:element name="{@name}">
<xsl:value-of select="@constant"/>
</xsl:element>
</xsl:when>
<xsl:when test="@value and not(@xpath)">
<xsl:choose>
<xsl:when test="contains(@type, 'date')">
<nxsl:variable name="{@name}" select="dnet:normalizeDate(normalize-space({normalize-space(@value)}))"/>
<nxsl:if test="string-length(${@name}) &gt; 0">
<nxsl:element name="{@name}">
<nxsl:value-of select="${@name}"/>
</nxsl:element>
</nxsl:if>
</xsl:when>
<xsl:otherwise>
<nxsl:element name="{@name}">
<nxsl:value-of select="{@value}"/>
</nxsl:element>
</xsl:otherwise>
</xsl:choose>
</xsl:when>
<xsl:otherwise>
<xsl:variable name="value">
<xsl:choose>
<xsl:when test="@value">
<xsl:value-of select="@value"/>
</xsl:when>
<xsl:otherwise>
.
</xsl:otherwise>
</xsl:choose>
</xsl:variable>
<nxsl:for-each select="{@xpath}">
<xsl:element name="{@name}">
<xsl:choose>
<xsl:when test="@tokenizable='false'">
<nxsl:value-of select="normalize-space({normalize-space($value)})"/>
</xsl:when>
<xsl:otherwise>
<nxsl:value-of select="{normalize-space($value)}"/>
</xsl:otherwise>
</xsl:choose>
</xsl:element>
</nxsl:for-each>
</xsl:otherwise>
</xsl:choose>
</xsl:template>
<xsl:template match="FIELD" mode="result">
<xsl:if test="@result='true'">
<nxsl:copy-of select="{@xpath}"/>
</xsl:if>
</xsl:template>
<xsl:template match="FIELD" mode="header">
<xsl:if test="@header='true'">
<nxsl:copy-of select="{@xpath}"/>
</xsl:if>
</xsl:template>
</xsl:stylesheet>
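For illustration, a layout entry such as the trust FIELD earlier in this commit (indexable="true", tokenizable="false", an xpath but no value attribute) falls through to the final otherwise branch of the FIELD template above, so the generated stylesheet would contain roughly the following fragment in its targetFields section; this is only a sketch of the expected output, with the nxsl prefix rewritten to xsl by the namespace-alias declaration:

<xsl:for-each select="//*[local-name()='entity']//datainfo/trust">
    <trust>
        <xsl:value-of select="normalize-space(.)"/>
    </trust>
</xsl:for-each>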


@ -0,0 +1,750 @@
<result xmlns:dri="http://www.driver-repository.eu/namespace/dri">
<header xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<dri:objIdentifier>dedup_wf_001::113ca964590682d906a3588d3d6b4838</dri:objIdentifier>
<dri:dateOfCollection>2020-03-15T05:46:43.509Z</dri:dateOfCollection>
<dri:dateOfTransformation>2020-03-15T21:17:13.902Z</dri:dateOfTransformation>
</header>
<metadata>
<oaf:entity xmlns:oaf="http://namespace.openaire.eu/oaf"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://namespace.openaire.eu/oaf https://www.openaire.eu/schema/1.0/oaf-1.0.xsd">
<oaf:result>
<collectedfrom name="scholExplorer"
id="openaire____::e034d6a11054f5ade9221ebac484e864"/>
<collectedfrom name="UnpayWall" id="openaire____::8ac8380272269217cb09a928c8caa993"/>
<collectedfrom name="DOAJ-Articles"
id="driver______::bee53aa31dc2cbb538c10c2b65fa5824"/>
<collectedfrom name="Crossref" id="openaire____::081b82f96300b6a6e3d282bad31cb6e2"/>
<collectedfrom name="Microsoft Academic Graph"
id="openaire____::5f532a3fc4f1ea403f37070f59a7a53a"/>
<collectedfrom name="PubMed Central"
id="opendoar____::eda80a3d5b344bc40f3bc04f65b7a357"/>
<originalId>oai:pubmedcentral.nih.gov:5657733</originalId>
<originalId>oai:doaj.org/article:f26495a4c1d149099049e1a604fa1256</originalId>
<originalId>10.1177/0963689717714320</originalId>
<pid classid="pmid" classname="pmid" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">28933215</pid>
<pid classid="pmc" classname="pmc" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">PMC5657733</pid>
<pid classid="doi" classname="doi" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">10.1177/0963689717714320</pid>
<title classid="main title" classname="main title" schemeid="dnet:dataCite_title"
schemename="dnet:dataCite_title" inferred="false">Extract Protects Model Neurons
from Traumatic Injury</title>
<bestaccessright classid="OPEN" classname="Open Access" schemeid="dnet:access_modes"
schemename="dnet:access_modes"/>
<creator rank="1">Jain, Subhash C.</creator>
<creator rank="2">Citron, Bruce A.</creator>
<creator rank="3">Vijayalakshmi Ravindranath</creator>
<creator rank="4">Saykally, Jessica N.</creator>
<creator rank="5">Keeley, Kristen L.</creator>
<creator rank="6">Haris Hatic</creator>
<dateofacceptance>2017-06-01</dateofacceptance>
<description>Withania somnifera has been used in traditional medicine for a variety
of neural disorders. Recently, chronic neurodegenerative conditions have been
shown to benefit from treatment with this extract. To evaluate the action of
this extract on traumatically injured neurons, the efficacy of W. somnifera root
extract as a neuroprotective agent was examined in cultured model neurons
exposed to an in vitro injury system designed to mimic mild traumatic brain
injury (TBI). Neuronal health was evaluated by staining with annexin V (an
early, apoptotic feature) and monitoring released lactate dehydrogenase activity
(a terminal cell loss parameter). Potential mechanisms underlying the observed
neuroprotection were examined. Additionally, morphological changes were
monitored following injury and treatment. Although no differences were found in
the expression of the antioxidant transcription factor nuclear factor erythroid
2-like 2 (Nrf2) or other Nrf2-related downstream components, significant changes
were seen in apoptotic signaling. Treatment with the extract resulted in an
increased length of neurites projecting from the neuronal cell body after
injury. W. somnifera extract treatment also resulted in reduced cell death in
the model neuron TBI system. The cell death factor Bax was involved (its
expression was reduced 2-fold by the treatment) and injury-induced reduction in
neurite lengths and numbers was reversed by the treatment. This all indicates
that W. somnifera root extract was neuroprotective and could have therapeutic
potential to target factors involved in secondary injury and long-term sequelae
of mild TBI.</description>
<embargoenddate/>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false">Withania
somnifera</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false">R</subject>
<subject classid="keyword" classname="keyword" schemeid="dnet:subject"
schemename="dnet:subject" inferred="false">Cell Biology</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false"
>neuroprotection</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false"
>SH-SY5Y</subject>
<subject classid="keyword" classname="keyword" schemeid="dnet:subject"
schemename="dnet:subject" inferred="false">Biomedical Engineering</subject>
<subject classid="keyword" classname="keyword" schemeid="dnet:subject"
schemename="dnet:subject" inferred="false">Transplantation</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false">traumatic
brain injury</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false"
>neurites</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false"
>Ayurveda</subject>
<subject classid="keyword" classname="keyword"
schemeid="dnet:subject_classification_typologies"
schemename="dnet:subject_classification_typologies" inferred="false"
>Medicine</subject>
<language classid="eng" classname="English" schemeid="dnet:languages"
schemename="dnet:languages"/>
<relevantdate classid="dnet:date" classname="dnet:date" schemeid="dnet:date"
schemename="dnet:date" inferred="false">2018-11-13</relevantdate>
<relevantdate classid="published-online" classname="published-online"
schemeid="dnet:dataCite_date" schemename="dnet:dataCite_date" inferred="false"
>2017-6-30</relevantdate>
<relevantdate classid="published-print" classname="published-print"
schemeid="dnet:dataCite_date" schemename="dnet:dataCite_date" inferred="false"
>2017-7-1</relevantdate>
<publisher>SAGE Publishing</publisher>
<source>Cell Transplantation, Vol 26 (2017)</source>
<source>Cell Transplantation</source>
<resulttype classid="publication" classname="publication"
schemeid="dnet:result_typologies" schemename="dnet:result_typologies"/>
<resourcetype/>
<journal issn="0963-6897" eissn="1555-3892" ep="1201" iss="7" sp="1193" vol="26"
>Cell Transplantation</journal>
<context id="NIH" label="National Institutes of Health" type="funding">
<category id="NIH::VETERANS AFFAIRS" label="VETERANS AFFAIRS"/>
</context>
<datainfo>
<inferred>true</inferred>
<deletedbyinference>false</deletedbyinference>
<trust>0.9</trust>
<inferenceprovenance>dedup-similarity-result-levenstein</inferenceprovenance>
<provenanceaction classid="sysimport:dedup" classname="sysimport:dedup"
schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
</datainfo>
<rels>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>wt__________::4de25ac59f6cb729d5716260164bb67c</to>
<legalname>Indian Institute Of Science</legalname>
</rel>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>nih_________::ba7da8316fd53d04a985bc935e438555</to>
<legalname>INDIAN INSTITUTE OF SCIENCE</legalname>
</rel>
<rel inferred="true" trust="0.9" inferenceprovenance=""
provenanceaction="sysimport:actionset">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>dedup_wf_001::0047940c0207b6a83e79cd803ecf17d1</to>
<country classid="GB" classname="United Kingdom" schemeid="dnet:countries"
schemename="dnet:countries"/>
<legalname>MRC - MRC Laboratory of Molecular Biology</legalname>
<legalshortname>LMB</legalshortname>
</rel>
<rel inferred="true" trust="0.8847"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>rcuk________::2558c4f3132f6907f7b23c69009f0d87</to>
<legalname>INDIAN INSTUTUTE OF SCIENCE</legalname>
</rel>
<rel inferred="true" trust="0.9" inferenceprovenance=""
provenanceaction="sysimport:actionset">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>dedup_wf_001::d2fdc8e80f8b4365091bcea83f918ccf</to>
<country classid="IN" classname="India" schemeid="dnet:countries"
schemename="dnet:countries"/>
<legalname>University of Delhi</legalname>
<legalshortname>University of Delhi</legalshortname>
</rel>
<rel inferred="true" trust="0.7200000000000001"
inferenceprovenance="dedup-similarity-result-levenstein" provenanceaction="">
<to class="merges" scheme="dnet:result_result_relations" type="publication"
>doiboost____::d5177e3ad00bd9288201b60206a0b5d0</to>
<dateofacceptance>2017-6-30</dateofacceptance>
<collectedfrom name="Crossref"
id="openaire____::081b82f96300b6a6e3d282bad31cb6e2"/>
<collectedfrom name="UnpayWall"
id="openaire____::8ac8380272269217cb09a928c8caa993"/>
<collectedfrom name="Microsoft Academic Graph"
id="openaire____::5f532a3fc4f1ea403f37070f59a7a53a"/>
<pid classid="doi" classname="doi" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false"
>10.1177/0963689717714320</pid>
</rel>
<rel inferred="true" trust="0.7200000000000001"
inferenceprovenance="dedup-similarity-result-levenstein" provenanceaction="">
<to class="merges" scheme="dnet:result_result_relations" type="publication"
>od_______267::fb470352a4b33af7c83391c02117c4fc</to>
<collectedfrom name="PubMed Central"
id="opendoar____::eda80a3d5b344bc40f3bc04f65b7a357"/>
<publisher>SAGE Publications</publisher>
<pid classid="pmc" classname="pmc" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">PMC5657733</pid>
<pid classid="pmid" classname="pmid" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">28933215</pid>
<dateofacceptance>2017-06-01</dateofacceptance>
<pid classid="doi" classname="doi" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false"
>10.1177/0963689717714320</pid>
</rel>
<rel inferred="true" trust="0.6249"
inferenceprovenance="iis::document_referencedProjects"
provenanceaction="iis">
<to class="isProducedBy" scheme="dnet:project_project_relations"
type="project">nih_________::24e81ae35bbcb50c778df1039f912617</to>
<funding>
<funder id="nih_________::NIH" shortname="NIH"
name="National Institutes of Health" jurisdiction="US"/>
<funding_level_0 name="VETERANS AFFAIRS"
>nih_________::NIH::VETERANS_AFFAIRS</funding_level_0>
</funding>
<title>Preventing TBI-Induced Chronic Functional Loss with a Neuroprotective
Antioxidant</title>
<code>1I01RX001520-01A1</code>
</rel>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>wt__________::52e59d4aa1c57bda1ec144f409de83fc</to>
<legalname>Indian Institute of Science</legalname>
</rel>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>dedup_wf_001::0499ff413ba8e7fa686531725ba12338</to>
<legalshortname>IISc</legalshortname>
<country classid="IN" classname="India" schemeid="dnet:countries"
schemename="dnet:countries"/>
<legalname>Indian Institute of Science</legalname>
</rel>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>wt__________::ba1db3669859a46e72f222052a9a26d8</to>
<legalname>University of Delhi</legalname>
</rel>
<rel inferred="true" trust="0.9" inferenceprovenance=""
provenanceaction="sysimport:actionset">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>dedup_wf_001::17c785347dfb060aa115af824b0c6789</to>
<legalshortname>IISc</legalshortname>
<country classid="IN" classname="India" schemeid="dnet:countries"
schemename="dnet:countries"/>
<legalname>Indian Institute of Science Bangalore</legalname>
</rel>
<rel inferred="true" trust="0.7200000000000001"
inferenceprovenance="dedup-similarity-result-levenstein" provenanceaction="">
<to class="merges" scheme="dnet:result_result_relations" type="publication"
>scholexplore::16181ec1a2484116e8ed6b3348858fe7</to>
<collectedfrom name="scholExplorer"
id="openaire____::e034d6a11054f5ade9221ebac484e864"/>
<pid classid="pmid" classname="pmid" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false">28933215</pid>
</rel>
<rel inferred="true" trust="0.7200000000000001"
inferenceprovenance="dedup-similarity-result-levenstein" provenanceaction="">
<to class="merges" scheme="dnet:result_result_relations" type="publication"
>doajarticles::cac994ec6c322070c41474486eb5c595</to>
<dateofacceptance>2017-07-01</dateofacceptance>
<collectedfrom name="DOAJ-Articles"
id="driver______::bee53aa31dc2cbb538c10c2b65fa5824"/>
<publisher>SAGE Publishing</publisher>
<pid classid="doi" classname="doi" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false"
>10.1177/0963689717714320</pid>
</rel>
<rel inferred="true" trust="0.6775"
inferenceprovenance="iis::document_referencedDatasets"
provenanceaction="iis">
<to class="isRelatedTo" scheme="dnet:result_result_relations" type="dataset"
>r37980778c78::39a72c53d5801325784f728b543a49a1</to>
<collectedfrom name="figshare"
id="re3data_____::7980778c78fb4cf0fab13ce2159030dc"/>
<pid classid="doi" classname="doi" schemeid="dnet:pid_types"
schemename="dnet:pid_types" inferred="false"
>10.1371/journal.pone.0006628</pid>
<dateofacceptance>2016-01-01</dateofacceptance>
<publisher>Figshare</publisher>
</rel>
<rel inferred="true" trust="0.8998"
inferenceprovenance="iis::document_affiliations" provenanceaction="iis">
<to class="hasAuthorInstitution"
scheme="dnet:organization_organization_relations" type="organization"
>rcuk________::23feba2a5ca7f6b6016bf3a45180da50</to>
<legalname>University of Delhi</legalname>
</rel>
</rels>
<children>
<instance id="openaire____::55045bd2a65019fd8e6741a755395c8c">
<accessright classid="UNKNOWN" classname="not available"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="scholExplorer"
id="openaire____::e034d6a11054f5ade9221ebac484e864"/>
<hostedby name="Unknown Repository"
id="openaire____::55045bd2a65019fd8e6741a755395c8c"/>
<instancetype classid="0000" classname="Unknown"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>https://www.ncbi.nlm.nih.gov/pubmed/28933215</url>
</webresource>
</instance>
<instance id="opendoar____::8b6dd7db9af49e67306feb59a8bdc52c">
<accessright classid="OPEN" classname="Open Access"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="PubMed Central"
id="opendoar____::eda80a3d5b344bc40f3bc04f65b7a357"/>
<hostedby name="Europe PubMed Central"
id="opendoar____::8b6dd7db9af49e67306feb59a8bdc52c"/>
<dateofacceptance>2017-06-01</dateofacceptance>
<instancetype classid="0001" classname="Article"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>http://europepmc.org/articles/PMC5657733</url>
</webresource>
</instance>
<instance id="openaire____::55045bd2a65019fd8e6741a755395c8c">
<accessright classid="UNKNOWN" classname="not available"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="Crossref"
id="openaire____::081b82f96300b6a6e3d282bad31cb6e2"/>
<hostedby name="Unknown Repository"
id="openaire____::55045bd2a65019fd8e6741a755395c8c"/>
<instancetype classid="0001" classname="Article"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>http://journals.sagepub.com/doi/full-xml/10.1177/0963689717714320</url>
</webresource>
<webresource>
<url>http://journals.sagepub.com/doi/pdf/10.1177/0963689717714320</url>
</webresource>
<webresource>
<url>https://academic.microsoft.com/#/detail/2588640354</url>
</webresource>
</instance>
<instance id="doajarticles::4607792f6beaa6e33d5ff5c63830da98">
<accessright classid="OPEN" classname="Open Access"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="DOAJ-Articles"
id="driver______::bee53aa31dc2cbb538c10c2b65fa5824"/>
<hostedby name="Cell Transplantation"
id="doajarticles::4607792f6beaa6e33d5ff5c63830da98"/>
<dateofacceptance>2017-07-01</dateofacceptance>
<instancetype classid="0001" classname="Article"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>https://doi.org/10.1177/0963689717714320</url>
</webresource>
<webresource>
<url>https://doaj.org/toc/0963-6897</url>
</webresource>
<webresource>
<url>https://doaj.org/toc/1555-3892</url>
</webresource>
</instance>
<instance id="doajarticles::4607792f6beaa6e33d5ff5c63830da98">
<accessright classid="RESTRICTED" classname="Restricted"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="Crossref"
id="openaire____::081b82f96300b6a6e3d282bad31cb6e2"/>
<hostedby name="Cell Transplantation"
id="doajarticles::4607792f6beaa6e33d5ff5c63830da98"/>
<instancetype classid="0001" classname="Article"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>http://dx.doi.org/10.1177/0963689717714320</url>
</webresource>
</instance>
<instance id="openaire____::55045bd2a65019fd8e6741a755395c8c">
<accessright classid="OPEN" classname="Open Access"
schemeid="dnet:access_modes" schemename="dnet:access_modes"/>
<collectedfrom name="UnpayWall"
id="openaire____::8ac8380272269217cb09a928c8caa993"/>
<hostedby name="Unknown Repository"
id="openaire____::55045bd2a65019fd8e6741a755395c8c"/>
<instancetype classid="0001" classname="Article"
schemeid="dnet:publication_resource"
schemename="dnet:publication_resource"/>
<webresource>
<url>https://journals.sagepub.com/doi/pdf/10.1177/0963689717714320</url>
</webresource>
</instance>
</children>
</oaf:result>
<extraInfo name="result citations" typology="citations"
provenance="iis::document_referencedDocuments" trust="0.9">
<citations>
<citation position="1">
<rawText>1 Bryan-Hancock C Harrison J The global burden of traumatic brain
injury: preliminary results from the Global Burden of Disease Project.
Inj Prev. 2010;16(Suppl 1):A17.</rawText>
</citation>
<citation position="2">
<rawText>2 Gardner RC Yaffe K Epidemiology of mild traumatic brain injury
and neurodegenerative disease. Mol Cell Neurosci. 2015;66(Pt
B):7580.25748121</rawText>
<id value="dedup_wf_001::7ea5a74d23841025462422c95f25f5de" type="openaire"
confidenceLevel="0.9"/>
<id value="25748121" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="3">
<rawText>3 Stern RA Riley DO Daneshvar DH Nowinski CJ Cantu RC McKee AC
Long-term consequences of repetitive brain trauma: chronic traumatic
encephalopathy. PM R. 2011;3(10 Suppl 2):S460S467.22035690</rawText>
<id value="scholexplore::4cd90a17b23dffce9a8cf58d81ca9e31" type="openaire"
confidenceLevel="0.9"/>
<id value="22035690" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="4">
<rawText>4 Mac Donald CL Johnson AM Cooper D Nelson EC Werner NJ Shimony JS
Snyder AZ Raichle ME Witherow JR Fang R Detection of blast-related
traumatic brain injury in U.S. military personnel. N Engl J Med.
2011;364(22):20912100.21631321</rawText>
<id value="dedup_wf_001::81f1d0d6f713d335cb4f8dbbd5aea3b5" type="openaire"
confidenceLevel="0.9"/>
<id value="21631321" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="5">
<rawText>5 Hatic H Kane MJ Saykally JN Citron BA Modulation of transcription
factor Nrf2 in an in vitro model of traumatic brain injury. J
Neurotrauma. 2012;29(6):11881196.22201269</rawText>
<id value="scholexplore::5a883cd3f6eab6dcfd9253dc3a667794" type="openaire"
confidenceLevel="0.9"/>
<id value="22201269" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="6">
<rawText>6 Saykally JN Rachmany L Hatic H Shaer A Rubovitch V Pick CG Citron
BA The nuclear factor erythroid 2-like 2 activator,
tert-butylhydroquinone, improves cognitive performance in mice after
mild traumatic brain injury. Neuroscience.
2012;223:305314.22890082</rawText>
<id value="dedup_wf_001::e1fdece0d7d8e81cf0497971758a07d7" type="openaire"
confidenceLevel="0.9"/>
<id value="22890082" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="7">
<rawText>7 Hall ED Vaishnav RA Mustafa AG Antioxidant therapies for
traumatic brain injury. Neurotherapeutics.
2010;7(1):5161.20129497</rawText>
<id value="dedup_wf_001::dad85993cf85ecab6c20834b36da8fde" type="openaire"
confidenceLevel="0.9"/>
<id value="20129497" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="8">
<rawText>8 Scartezzini P Speroni E Review on some plants of Indian
traditional medicine with antioxidant activity. J Ethnopharmacol.
2000;71(12):2343.10904144</rawText>
<id value="dedup_wf_001::4bdc528dcf25cfa21d6d668263b519f4" type="openaire"
confidenceLevel="0.9"/>
<id value="10904144" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="9">
<rawText>9 Mishra LC Singh BB Dagenais S Scientific basis for the
therapeutic use of Withania somnifera (ashwagandha): a review. Altern
Med Rev. 2000;5(4):334346.10956379</rawText>
<id value="scholexplore::8093622f33e4a82bf55d0dc741b5023c" type="openaire"
confidenceLevel="0.9"/>
<id value="10956379" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="10">
<rawText>10 Singh RH Exploring larger evidence-base for contemporary
Ayurveda. Int J Ayurveda Res. 2010;1(2):6566.20814517</rawText>
<id value="dedup_wf_001::ceac054eb7c6b9bd15dfad24c6e339a5" type="openaire"
confidenceLevel="0.9"/>
<id value="20814517" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="11">
<rawText>11 Alam N Hossain M Mottalib MA Sulaiman SA Gan SH Khalil MI
Methanolic extracts of Withania somnifera leaves, fruits and roots
possess antioxidant properties and antibacterial activities. BMC
Complement Altern Med. 2012;12:175.23039061</rawText>
<id value="dedup_wf_001::d85dd0b9c21a5859b53d45391bc905c8" type="openaire"
confidenceLevel="0.9"/>
<id value="23039061" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="12">
<rawText>12 Gupta GL Rana AC Withania somnifera (ashwagandha): a review.
Pharmacognosy Rev. 2007;1(1):129136.</rawText>
</citation>
<citation position="13">
<rawText>13 Durg S Dhadde SB Vandal R Shivakumar BS Charan CS Withania
somnifera (ashwagandha) in neurobehavioural disorders induced by brain
oxidative stress in rodents: a systematic review and meta-analysis. J
Pharm Pharmacol. 2015;67(7):879899.25828061</rawText>
<id value="dedup_wf_001::aec7c43727fca72e466703fad9cd50d0" type="openaire"
confidenceLevel="0.9"/>
<id value="25828061" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="14">
<rawText>14 Kuboyama T Tohda C Komatsu K Effects of ashwagandha (roots of
Withania somnifera) on neurodegenerative diseases. Biol Pharm Bull.
2014;37(6):892897.24882401</rawText>
<id value="dedup_wf_001::e475da1c365be8b7938bd439cd91b1ce" type="openaire"
confidenceLevel="0.9"/>
<id value="24882401" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="15">
<rawText>15 Mirjalili MH Moyano E Bonfill M Cusido RM Palazon J Steroidal
lactones from Withania somnifera, an ancient plant for novel medicine.
Molecules. 2009;14(7):23732393.19633611</rawText>
<id value="dedup_wf_001::034ea95279138fb4f2e3fc41e50c953f" type="openaire"
confidenceLevel="0.9"/>
<id value="19633611" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="16">
<rawText>16 Ven Murthy MR Ranjekar PK Ramassamy C Deshpande M Scientific
basis for the use of Indian ayurvedic medicinal plants in the treatment
of neurodegenerative disorders: ashwagandha. Cent Nerv Syst Agents Med
Chem. 2010;10(3):238246.20528765</rawText>
<id value="scholexplore::ec8cea63a81d526a1a8133af3557674f" type="openaire"
confidenceLevel="0.9"/>
<id value="20528765" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="17">
<rawText>17 Singh RH Narsimhamurthy K Singh G Neuronutrient impact of
Ayurvedic Rasayana therapy in brain aging. Biogerontology.
2008;9(6):369374.18931935</rawText>
<id value="dedup_wf_001::41e1d24589d4c1e6b7bf322732b69227" type="openaire"
confidenceLevel="0.9"/>
<id value="18931935" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="18">
<rawText>18 Kulkarni SK Dhir A Withania somnifera: an Indian ginseng. Prog
Neuropsychopharmacol Biol Psychiatry.
2008;32(5):10931105.17959291</rawText>
<id value="dedup_wf_001::c0c07dd044ffb2996b047e09c0eb8891" type="openaire"
confidenceLevel="0.9"/>
<id value="17959291" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="19">
<rawText>19 Cooley K Szczurko O Perri D Mills EJ Bernhardt B Zhou Q Seely D
Naturopathic care for anxiety: a randomized controlled trial
ISRCTN78958974. PLoS One. 2009;4(8):e6628.19718255</rawText>
<id value="dedup_wf_001::6c8ad2b3098d9bf2207317744f8c8632" type="openaire"
confidenceLevel="0.9"/>
<id value="19718255" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="20">
<rawText>20 Chopra A Lavin P Patwardhan B Chitre D A 32-week randomized,
placebo-controlled clinical evaluation of RA-11, an Ayurvedic drug, on
osteoarthritis of the knees. J Clin Rheumatol.
2004;10(5):236245.17043520</rawText>
<id value="dedup_wf_001::6ad0c8932a7eab3967c0800822af5177" type="openaire"
confidenceLevel="0.9"/>
<id value="17043520" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="21">
<rawText>21 Chaudhary G Sharma U Jagannathan NR Gupta YK Evaluation of
Withania somnifera in a middle cerebral artery occlusion model of stroke
in rats. Clin Exp Pharmacol Physiol.
2003;30(56):399404.12859433</rawText>
<id value="12859433" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="22">
<rawText>22 Adams JD Jr Yang J Mishra LC Singh BB Effects of ashwagandha in
a rat model of stroke. Altern Ther Health Med.
2002;8(5):1819.</rawText>
</citation>
<citation position="23">
<rawText>23 Baitharu I Jain V Deep SN Hota KB Hota SK Prasad D Ilavazhagan G
Withania somnifera root extract ameliorates hypobaric hypoxia induced
memory impairment in rats. J Ethnopharmacol.
2013;145(2):431441.23211660</rawText>
<id value="dedup_wf_001::744d9bbe76384c0e933cb2b8099105ba" type="openaire"
confidenceLevel="0.9"/>
<id value="23211660" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="24">
<rawText>24 RajaSankar S Manivasagam T Sankar V Prakash S Muthusamy R
Krishnamurti A Surendran S Withania somnifera root extract improves
catecholamines and physiological abnormalities seen in a Parkinsons
disease model mouse. J Ethnopharmacol.
2009;125(3):369373.19666100</rawText>
<id value="dedup_wf_001::7d60b85f60390db7b841f13778fbbd8a" type="openaire"
confidenceLevel="0.9"/>
<id value="19666100" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="25">
<rawText>25 Pingali U Pilli R Fatima N Effect of standardized aqueous
extract of Withania somnifera on tests of cognitive and psychomotor
performanc e in healthy human participants. Pharmacognosy Res.
2014;6(1):1218.24497737</rawText>
<id value="dedup_wf_001::024f29db87472548b39ca9ce8ff4c76e" type="openaire"
confidenceLevel="0.9"/>
<id value="24497737" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="26">
<rawText>26 Prabhakaran Y Dinakaran SK Macharala SP Ghosh S Karanam SR
Kanthasamy N Avasarala H Molecular docking studies of withanolides
against Cox-2 enzyme. Pak J Pharm Sci.
2012;25(3):595598.22713947</rawText>
<id value="dedup_wf_001::5ff38da7b7b422ff22180741a908aa14" type="openaire"
confidenceLevel="0.9"/>
<id value="22713947" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="27">
<rawText>27 Mohan R Hammers HJ Bargagna-Mohan P Zhan XH Herbstritt CJ Ruiz A
Zhang L Hanson AD Conner BP Rougas J Withaferin A is a potent inhibitor
of angiogenesis. Angiogenesis. 2004;7(2):115122.15516832</rawText>
<id value="dedup_wf_001::dd552ab21a101677856a159c9d74908a" type="openaire"
confidenceLevel="0.9"/>
<id value="15516832" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="28">
<rawText>28 Friedemann T Otto B Klatschke K Schumacher U Tao Y Leung AK
Efferth T Schroder S Coptis chinensis Franch. exhibits neuroprotective
properties against oxidative stress in human neuroblastoma cells. J
Ethnopharmacol. 2014;155(1):607615.24929105</rawText>
<id value="dedup_wf_001::9f8a8f6e0aedf9dff47a66b8229baf87" type="openaire"
confidenceLevel="0.9"/>
<id value="24929105" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="29">
<rawText>29 Hu S Han R Mak S Han Y Protection against
1-methyl-4-phenylpyridinium ion (MPP+)-induced apoptosis by water
extract of ginseng (Panax ginseng C.A. Meyer) in SH-SY5Y cells. J
Ethnopharmacol. 2011;135(1):3442.21349320</rawText>
<id value="scholexplore::646e892807dc6e1a30938d439b879a76" type="openaire"
confidenceLevel="0.9"/>
<id value="21349320" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="30">
<rawText>30 Kane MJ Hatic H Delic V Dennis JS Butler CL Saykally JN Citron
BA Modeling the pathobiology of repetitive traumatic brain injury in
immortalized neuronal cell lines. Brain Res.
2011;1425:123131.22018688</rawText>
<id value="22018688" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="31">
<rawText>31 Sehgal N Gupta A Valli RK Joshi SD Mills JT Hamel E Khanna P
Jain SC Thakur SS Ravindranath V Withania somnifera reverses Alzheimers
disease pathology by enhancing low-density lipoprotein receptor-related
protein in liver. Proc Natl Acad Sci U S A.
2012;109(9):35103515.22308347</rawText>
<id value="dedup_wf_001::6a5a977979d2b9beed791c95191ca124" type="openaire"
confidenceLevel="0.9"/>
<id value="22308347" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="32">
<rawText>32 Arundine M Aarts M Lau A Tymianski M Vulnerability of central
neurons to secondary insults after in vitro mechanical stretch. J
Neurosci. 2004;24(37):81068123.15371512</rawText>
<id value="scholexplore::a92bd7b567b76e9a41c190ee141483da" type="openaire"
confidenceLevel="0.9"/>
<id value="15371512" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="33">
<rawText>33 Lau A Arundine M Sun HS Jones M Tymianski M Inhibition of
caspase-mediated apoptosis by peroxynitrite in traumatic brain injury. J
Neurosci. 2006;26(45):1154011553.17093075</rawText>
<id value="17093075" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="34">
<rawText>34 Weber JT Rzigalinski BA Ellis EF Traumatic injury of cortical
neurons causes changes in intracellular calcium stores and capacitative
calcium influx. J Biol Chem. 2001;276(3):18001807.11050103</rawText>
<id value="11050103" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="35">
<rawText>35 Ellis EF McKinney JS Willoughby KA Liang S Povlishock JT A new
model for rapid stretch-induced injury of cells in culture:
characterization of the model using astrocytes. J Neurotrauma.
1995;12(3):325339.7473807</rawText>
<id value="7473807" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="36">
<rawText>36 Zhang Y Ba Y Liu C Sun G Ding L Gao S Hao J Yu Z Zhang J Zen K
PGC-1alpha induces apoptosis in human epithelial ovarian cancer cells
through a PPARgamma-dependent pathway. Cell Res.
2007;17(4):363373.17372612</rawText>
<id value="17372612" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="37">
<rawText>37 Brooks AR Lelkes PI Rubanyi GM Gene expression profiling of
human aortic endothelial cells exposed to disturbed flow and steady
laminar flow. Physiol Genomics. 2002;9(1):2741.11948288</rawText>
<id value="11948288" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="38">
<rawText>38 Du Y Villeneuve NF Wang XJ Sun Z Chen W Li J Lou H Wong PK Zhang
DD Oridonin confers protection against arsenic-induced toxicity through
activation of the Nrf2-mediated defensive response. Environ Health
Perspect. 2008;116(9):11541161.18795156</rawText>
<id value="dedup_wf_001::c4cf0ed4f97b1c6e4c75fcb9c7afe9e0" type="openaire"
confidenceLevel="0.9"/>
<id value="18795156" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="39">
<rawText>39 Pool M Thiemann J Bar-Or A Fournier AE NeuriteTracer: a novel
ImageJ plugin for automated quantification of neurite outgrowth. J
Neurosci Methods. 2008;168(1):134139.17936365</rawText>
<id value="dedup_wf_001::6880dcaf7780c1e1c331a14f57657cb9" type="openaire"
confidenceLevel="0.9"/>
<id value="17936365" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="40">
<rawText>40 Chen J Wu X Shao B Zhao W Shi W Zhang S Ni L Shen A Increased
expression of TNF receptor-associated factor 6 after rat traumatic brain
injury. Cell Mol Neurobiol. 2011;31(2):269275.21072581</rawText>
<id value="21072581" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="41">
<rawText>41 Kuboyama T Tohda C Komatsu K Neuritic regeneration and synaptic
reconstruction induced by withanolide A. Br J Pharmacol.
2005;144(7):961971.15711595</rawText>
<id value="scholexplore::b4df67f5a2958ef86b5ad05198145918" type="openaire"
confidenceLevel="0.9"/>
<id value="15711595" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="42">
<rawText>42 Kuboyama T Tohda C Komatsu K Withanoside IV and its active
metabolite, sominone, attenuate Abeta(25-35)-induced neurodegeneration
Eur J Neurosci. 2006;23(6):14171426.16553605</rawText>
<id value="scholexplore::76a1d92beacaba8b41d27bcfe74b499d" type="openaire"
confidenceLevel="0.9"/>
<id value="16553605" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="43">
<rawText>43 Jarrard LE On the role of the hippocampus in learning and memory
in the rat. Behav Neural Biol. 1993;60(1):926.8216164</rawText>
<id value="dedup_wf_001::b3c0dbd186731e31ce56da1154570e0f" type="openaire"
confidenceLevel="0.9"/>
<id value="8216164" type="pmid" confidenceLevel="0.9"/>
</citation>
<citation position="44">
<rawText>44 Vareed SK Bauer AK Nair KM Liu Y Jayaprakasam B Nair MG
Blood-brain barrier permeability of bioactive withanamides present in
Withania somnifera fruit extract. Phytother Res.
2014;28(8):12601264.24458838</rawText>
<id value="scholexplore::72d81005d8571c5fd59daf66a9d0fc73" type="openaire"
confidenceLevel="0.9"/>
<id value="24458838" type="pmid" confidenceLevel="0.9"/>
</citation>
</citations>
</extraInfo>
</oaf:entity>
</metadata>
</result>
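As a rough cross-check, a stylesheet generated from the LAYOUT earlier in this commit and applied to this sample record should produce an indexRecord whose identifier and targetFields include entries along the following lines; every value below is read directly from the record above, while the selection and order of fields are illustrative only (dnetResult/header handling is omitted here):

<indexRecordIdentifier>dedup_wf_001::113ca964590682d906a3588d3d6b4838</indexRecordIdentifier>
<targetFields>
    <pidclassid>pmid</pidclassid>
    <pidclassid>pmc</pidclassid>
    <pidclassid>doi</pidclassid>
    <provenanceactionclassid>sysimport:dedup</provenanceactionclassid>
    <inferred>true</inferred>
    <deletedbyinference>false</deletedbyinference>
    <trust>0.9</trust>
    <contextid>NIH</contextid>
    <contexttype>funding</contexttype>
    <contextname>National Institutes of Health</contextname>
    <categoryid>NIH::VETERANS AFFAIRS</categoryid>
    <categoryname>VETERANS AFFAIRS</categoryname>
    <!-- further fields omitted -->
</targetFields>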


@ -3,7 +3,7 @@
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>dhp-stats-update</artifactId> <artifactId>dhp-stats-update</artifactId>


@ -15,12 +15,12 @@
<name>oozie.action.sharelib.for.spark</name> <name>oozie.action.sharelib.for.spark</name>
<value>spark2</value> <value>spark2</value>
</property> </property>
<property> <property>
<name>hive_metastore_uris</name> <name>hiveMetastoreUris</name>
<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value> <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
</property> </property>
<property> <property>
<name>hive_jdbc_url</name> <name>hiveJdbcUrl</name>
<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value> <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
</property> </property>
<property> <property>


@ -1,2 +1,2 @@
DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages; DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages;
CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from openaire.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages; CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from ${openaire_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;


@ -14,11 +14,11 @@
<description>the external stats that should be added since they are not included in the graph database</description> <description>the external stats that should be added since they are not included in the graph database</description>
</property> </property>
<property> <property>
<name>hive_metastore_uris</name> <name>hiveMetastoreUris</name>
<description>hive server metastore URIs</description> <description>hive server metastore URIs</description>
</property> </property>
<property> <property>
<name>hive_jdbc_url</name> <name>hiveJdbcUrl</name>
<description>hive server jdbc url</description> <description>hive server jdbc url</description>
</property> </property>
</parameters> </parameters>
@ -29,7 +29,7 @@
<configuration> <configuration>
<property> <property>
<name>hive.metastore.uris</name> <name>hive.metastore.uris</name>
<value>${hive_metastore_uris}</value> <value>${hiveMetastoreUris}</value>
</property> </property>
</configuration> </configuration>
</global> </global>
@ -42,7 +42,7 @@
<action name="Step1"> <action name="Step1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step1.sql</script> <script>scripts/step1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -53,7 +53,7 @@
<action name="Step2_1"> <action name="Step2_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_1.sql</script> <script>scripts/step2_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -64,7 +64,7 @@
<action name="Step2_2"> <action name="Step2_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_2.sql</script> <script>scripts/step2_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -75,7 +75,7 @@
<action name="Step2_3"> <action name="Step2_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_3.sql</script> <script>scripts/step2_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -86,7 +86,7 @@
<action name="Step2_4"> <action name="Step2_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_4.sql</script> <script>scripts/step2_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -97,7 +97,7 @@
<action name="Step2_5"> <action name="Step2_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_5.sql</script> <script>scripts/step2_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -108,7 +108,7 @@
<action name="Step2_6"> <action name="Step2_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_6.sql</script> <script>scripts/step2_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -119,7 +119,7 @@
<action name="Step2_7"> <action name="Step2_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_7.sql</script> <script>scripts/step2_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -130,7 +130,7 @@
<action name="Step2_8"> <action name="Step2_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_8.sql</script> <script>scripts/step2_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -141,7 +141,7 @@
<action name="Step2_9"> <action name="Step2_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step2_9.sql</script> <script>scripts/step2_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -152,7 +152,7 @@
<action name="Step3"> <action name="Step3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step3.sql</script> <script>scripts/step3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -163,7 +163,7 @@
<action name="Step4_1"> <action name="Step4_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_1.sql</script> <script>scripts/step4_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -174,7 +174,7 @@
<action name="Step4_2"> <action name="Step4_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_2.sql</script> <script>scripts/step4_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -185,7 +185,7 @@
<action name="Step4_3"> <action name="Step4_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_3.sql</script> <script>scripts/step4_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -196,7 +196,7 @@
<action name="Step4_4"> <action name="Step4_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_4.sql</script> <script>scripts/step4_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -207,7 +207,7 @@
<action name="Step4_5"> <action name="Step4_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_5.sql</script> <script>scripts/step4_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -218,7 +218,7 @@
<action name="Step4_6"> <action name="Step4_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_6.sql</script> <script>scripts/step4_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -229,7 +229,7 @@
<action name="Step4_7"> <action name="Step4_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_7.sql</script> <script>scripts/step4_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -240,7 +240,7 @@
<action name="Step4_8"> <action name="Step4_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_8.sql</script> <script>scripts/step4_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -251,7 +251,7 @@
<action name="Step4_9"> <action name="Step4_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_9.sql</script> <script>scripts/step4_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -262,7 +262,7 @@
<action name="Step4_10"> <action name="Step4_10">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step4_10.sql</script> <script>scripts/step4_10.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -273,7 +273,7 @@
<action name="Step5_1"> <action name="Step5_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_1.sql</script> <script>scripts/step5_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -284,7 +284,7 @@
<action name="Step5_2"> <action name="Step5_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_2.sql</script> <script>scripts/step5_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -295,7 +295,7 @@
<action name="Step5_3"> <action name="Step5_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_3.sql</script> <script>scripts/step5_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -306,7 +306,7 @@
<action name="Step5_4"> <action name="Step5_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_4.sql</script> <script>scripts/step5_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -317,7 +317,7 @@
<action name="Step5_5"> <action name="Step5_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_5.sql</script> <script>scripts/step5_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -328,7 +328,7 @@
<action name="Step5_6"> <action name="Step5_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_6.sql</script> <script>scripts/step5_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -339,7 +339,7 @@
<action name="Step5_7"> <action name="Step5_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_7.sql</script> <script>scripts/step5_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -350,7 +350,7 @@
<action name="Step5_8"> <action name="Step5_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_8.sql</script> <script>scripts/step5_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -361,7 +361,7 @@
<action name="Step5_9"> <action name="Step5_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_9.sql</script> <script>scripts/step5_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -372,7 +372,7 @@
<action name="Step5_10"> <action name="Step5_10">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step5_10.sql</script> <script>scripts/step5_10.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -383,7 +383,7 @@
<action name="Step6_1"> <action name="Step6_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_1.sql</script> <script>scripts/step6_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -394,7 +394,7 @@
<action name="Step6_2"> <action name="Step6_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_2.sql</script> <script>scripts/step6_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -405,7 +405,7 @@
<action name="Step6_3"> <action name="Step6_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_3.sql</script> <script>scripts/step6_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -416,7 +416,7 @@
<action name="Step6_4"> <action name="Step6_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_4.sql</script> <script>scripts/step6_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -427,7 +427,7 @@
<action name="Step6_5"> <action name="Step6_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_5.sql</script> <script>scripts/step6_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -438,7 +438,7 @@
<action name="Step6_6"> <action name="Step6_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_6.sql</script> <script>scripts/step6_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -449,7 +449,7 @@
<action name="Step6_7"> <action name="Step6_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_7.sql</script> <script>scripts/step6_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -460,7 +460,7 @@
<action name="Step6_8"> <action name="Step6_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_8.sql</script> <script>scripts/step6_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -471,7 +471,7 @@
<action name="Step6_9"> <action name="Step6_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_9.sql</script> <script>scripts/step6_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -482,7 +482,7 @@
<action name="Step6_10"> <action name="Step6_10">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step6_10.sql</script> <script>scripts/step6_10.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -493,7 +493,7 @@
<action name="Step7_1"> <action name="Step7_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step7_1.sql</script> <script>scripts/step7_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -504,7 +504,7 @@
<action name="Step7_2"> <action name="Step7_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step7_2.sql</script> <script>scripts/step7_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -515,7 +515,7 @@
<action name="Step7_3"> <action name="Step7_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step7_3.sql</script> <script>scripts/step7_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -526,7 +526,7 @@
<action name="Step7_4"> <action name="Step7_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step7_4.sql</script> <script>scripts/step7_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -537,7 +537,7 @@
<action name="Step7_5"> <action name="Step7_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step7_5.sql</script> <script>scripts/step7_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -548,7 +548,7 @@
<action name="Step8_1"> <action name="Step8_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_1.sql</script> <script>scripts/step8_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -559,7 +559,7 @@
<action name="Step8_2"> <action name="Step8_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_2.sql</script> <script>scripts/step8_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -570,7 +570,7 @@
<action name="Step8_3"> <action name="Step8_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_3.sql</script> <script>scripts/step8_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -581,7 +581,7 @@
<action name="Step8_4"> <action name="Step8_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_4.sql</script> <script>scripts/step8_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -592,7 +592,7 @@
<action name="Step8_5"> <action name="Step8_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_5.sql</script> <script>scripts/step8_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -603,7 +603,7 @@
<action name="Step8_6"> <action name="Step8_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_6.sql</script> <script>scripts/step8_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -614,7 +614,7 @@
<action name="Step8_7"> <action name="Step8_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_7.sql</script> <script>scripts/step8_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -625,7 +625,7 @@
<action name="Step8_8"> <action name="Step8_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_8.sql</script> <script>scripts/step8_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -636,7 +636,7 @@
<action name="Step8_9"> <action name="Step8_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_9.sql</script> <script>scripts/step8_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -647,7 +647,7 @@
<action name="Step8_10"> <action name="Step8_10">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_10.sql</script> <script>scripts/step8_10.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -658,7 +658,7 @@
<action name="Step8_11"> <action name="Step8_11">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step8_11.sql</script> <script>scripts/step8_11.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -669,7 +669,7 @@
<action name="Step9_1"> <action name="Step9_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_1.sql</script> <script>scripts/step9_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -680,7 +680,7 @@
<action name="Step9_2"> <action name="Step9_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_2.sql</script> <script>scripts/step9_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -691,7 +691,7 @@
<action name="Step9_3"> <action name="Step9_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_3.sql</script> <script>scripts/step9_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -702,7 +702,7 @@
<action name="Step9_4"> <action name="Step9_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_4.sql</script> <script>scripts/step9_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -713,7 +713,7 @@
<action name="Step9_5"> <action name="Step9_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_5.sql</script> <script>scripts/step9_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -724,7 +724,7 @@
<action name="Step9_6"> <action name="Step9_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_6.sql</script> <script>scripts/step9_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -735,7 +735,7 @@
<action name="Step9_7"> <action name="Step9_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_7.sql</script> <script>scripts/step9_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -746,7 +746,7 @@
<action name="Step9_8"> <action name="Step9_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_8.sql</script> <script>scripts/step9_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -757,7 +757,7 @@
<action name="Step9_9"> <action name="Step9_9">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step9_9.sql</script> <script>scripts/step9_9.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -768,7 +768,7 @@
<action name="Step10_1"> <action name="Step10_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step10_1.sql</script> <script>scripts/step10_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -779,7 +779,7 @@
<action name="Step10_2"> <action name="Step10_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step10_2.sql</script> <script>scripts/step10_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -790,7 +790,7 @@
<action name="Step10_3"> <action name="Step10_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step10_3.sql</script> <script>scripts/step10_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -801,7 +801,7 @@
<action name="Step11"> <action name="Step11">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step11.sql</script> <script>scripts/step11.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -813,7 +813,7 @@
<action name="Step12_1"> <action name="Step12_1">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_1.sql</script> <script>scripts/step12_1.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -824,7 +824,7 @@
<action name="Step12_2"> <action name="Step12_2">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_2.sql</script> <script>scripts/step12_2.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -835,7 +835,7 @@
<action name="Step12_3"> <action name="Step12_3">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_3.sql</script> <script>scripts/step12_3.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -846,7 +846,7 @@
<action name="Step12_4"> <action name="Step12_4">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_4.sql</script> <script>scripts/step12_4.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -857,7 +857,7 @@
<action name="Step12_5"> <action name="Step12_5">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_5.sql</script> <script>scripts/step12_5.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -868,7 +868,7 @@
<action name="Step12_6"> <action name="Step12_6">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_6.sql</script> <script>scripts/step12_6.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -879,7 +879,7 @@
<action name="Step12_7"> <action name="Step12_7">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_7.sql</script> <script>scripts/step12_7.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -890,7 +890,7 @@
<action name="Step12_8"> <action name="Step12_8">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step12_8.sql</script> <script>scripts/step12_8.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -901,7 +901,7 @@
<action name="Step13"> <action name="Step13">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step13.sql</script> <script>scripts/step13.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
@ -912,7 +912,7 @@
<action name="Step14"> <action name="Step14">
<hive2 xmlns="uri:oozie:hive2-action:0.1"> <hive2 xmlns="uri:oozie:hive2-action:0.1">
<jdbc-url>${hive_jdbc_url}</jdbc-url> <jdbc-url>${hiveJdbcUrl}</jdbc-url>
<script>scripts/step14.sql</script> <script>scripts/step14.sql</script>
<param>stats_db_name=${stats_db_name}</param> <param>stats_db_name=${stats_db_name}</param>
<param>openaire_db_name=${openaire_db_name}</param> <param>openaire_db_name=${openaire_db_name}</param>
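Every hunk in this workflow file applies the same one-line change: the Hive endpoint referenced by each hive2 action's <jdbc-url> element is switched from the ${hive_jdbc_url} property to ${hiveJdbcUrl}. As a rough single-column sketch of what one such action reads like after the rename (the action name, script path, and the <ok>/<error> transition targets below are placeholders, not taken from the diff):

<action name="StepN">
    <hive2 xmlns="uri:oozie:hive2-action:0.1">
        <jdbc-url>${hiveJdbcUrl}</jdbc-url>
        <script>scripts/stepN.sql</script>
        <param>stats_db_name=${stats_db_name}</param>
        <param>openaire_db_name=${openaire_db_name}</param>
    </hive2>
    <ok to="NextStep"/>
    <error to="Kill"/>
</action>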

View File

@ -1,11 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent> <parent>
<artifactId>dhp-workflows</artifactId> <artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>eu.dnetlib.dhp</groupId> <groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp</artifactId> <artifactId>dhp</artifactId>
<version>1.1.7-SNAPSHOT</version> <version>1.1.8-SNAPSHOT</version>
<relativePath>../</relativePath> <relativePath>../</relativePath>
</parent> </parent>
@ -26,7 +26,7 @@
<module>dhp-dedup-scholexplorer</module> <module>dhp-dedup-scholexplorer</module>
<module>dhp-graph-provision-scholexplorer</module> <module>dhp-graph-provision-scholexplorer</module>
<module>dhp-stats-update</module> <module>dhp-stats-update</module>
<module>dhp-doiboost</module> <module>dhp-broker-events</module>
</modules> </modules>
<pluginRepositories> <pluginRepositories>

1126 pom.xml

File diff suppressed because it is too large