master #11

Manually merged
claudio.atzori merged 275 commits from :master into enrichment_wfs 2020-05-11 15:14:56 +02:00
21 changed files with 831 additions and 498 deletions
Showing only changes of commit 5efae3acb9

View File

@@ -6,36 +6,83 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;

 public class ModelConstants {

     public static final String DNET_RESULT_TYPOLOGIES = "dnet:result_typologies";
+    public static final String DNET_PUBLICATION_RESOURCE = "dnet:publication_resource";
+    public static final String DNET_ACCESS_MODES = "dnet:access_modes";
+    public static final String DNET_LANGUAGES = "dnet:languages";
+    public static final String DNET_PID_TYPES = "dnet:pid_types";
+    public static final String DNET_DATA_CITE_DATE = "dnet:dataCite_date";
+    public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource";
+    public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";
+
+    public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
+    public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry";
+    public static final String USER_CLAIM = "user:claim";

     public static final String DATASET_RESULTTYPE_CLASSID = "dataset";
     public static final String PUBLICATION_RESULTTYPE_CLASSID = "publication";
     public static final String SOFTWARE_RESULTTYPE_CLASSID = "software";
     public static final String ORP_RESULTTYPE_CLASSID = "other";

-    public static Qualifier PUBLICATION_DEFAULT_RESULTTYPE = new Qualifier();
-    public static Qualifier DATASET_DEFAULT_RESULTTYPE = new Qualifier();
-    public static Qualifier SOFTWARE_DEFAULT_RESULTTYPE = new Qualifier();
-    public static Qualifier ORP_DEFAULT_RESULTTYPE = new Qualifier();
-
-    static {
-        PUBLICATION_DEFAULT_RESULTTYPE.setClassid(PUBLICATION_RESULTTYPE_CLASSID);
-        PUBLICATION_DEFAULT_RESULTTYPE.setClassname(PUBLICATION_RESULTTYPE_CLASSID);
-        PUBLICATION_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-        PUBLICATION_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-        DATASET_DEFAULT_RESULTTYPE.setClassid(DATASET_RESULTTYPE_CLASSID);
-        DATASET_DEFAULT_RESULTTYPE.setClassname(DATASET_RESULTTYPE_CLASSID);
-        DATASET_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-        DATASET_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-        SOFTWARE_DEFAULT_RESULTTYPE.setClassid(SOFTWARE_RESULTTYPE_CLASSID);
-        SOFTWARE_DEFAULT_RESULTTYPE.setClassname(SOFTWARE_RESULTTYPE_CLASSID);
-        SOFTWARE_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-        SOFTWARE_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-        ORP_DEFAULT_RESULTTYPE.setClassid(ORP_RESULTTYPE_CLASSID);
-        ORP_DEFAULT_RESULTTYPE.setClassname(ORP_RESULTTYPE_CLASSID);
-        ORP_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-        ORP_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-    }
+    public static final String RESULT_RESULT = "resultResult";
+    public static final String PUBLICATION_DATASET = "publicationDataset";
+    public static final String IS_RELATED_TO = "isRelatedTo";
+    public static final String SUPPLEMENT = "supplement";
+    public static final String IS_SUPPLEMENT_TO = "isSupplementTo";
+    public static final String IS_SUPPLEMENTED_BY = "isSupplementedBy";
+    public static final String PART = "part";
+    public static final String IS_PART_OF = "IsPartOf";
+    public static final String HAS_PARTS = "HasParts";
+    public static final String RELATIONSHIP = "relationship";
+
+    public static final String RESULT_PROJECT = "resultProject";
+    public static final String OUTCOME = "outcome";
+    public static final String IS_PRODUCED_BY = "isProducedBy";
+    public static final String PRODUCES = "produces";
+
+    public static final String DATASOURCE_ORGANIZATION = "datasourceOrganization";
+    public static final String PROVISION = "provision";
+    public static final String IS_PROVIDED_BY = "isProvidedBy";
+    public static final String PROVIDES = "provides";
+
+    public static final String PROJECT_ORGANIZATION = "projectOrganization";
+    public static final String PARTICIPATION = "participation";
+    public static final String HAS_PARTICIPANT = "hasParticipant";
+    public static final String IS_PARTICIPANT = "isParticipant";
+
+    public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
+        PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
+        DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+    public static final Qualifier DATASET_DEFAULT_RESULTTYPE = qualifier(
+        DATASET_RESULTTYPE_CLASSID, DATASET_RESULTTYPE_CLASSID,
+        DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+    public static final Qualifier SOFTWARE_DEFAULT_RESULTTYPE = qualifier(
+        SOFTWARE_RESULTTYPE_CLASSID, SOFTWARE_RESULTTYPE_CLASSID,
+        DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+    public static final Qualifier ORP_DEFAULT_RESULTTYPE = qualifier(
+        ORP_RESULTTYPE_CLASSID, ORP_RESULTTYPE_CLASSID,
+        DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+    public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
+        SYSIMPORT_CROSSWALK_REPOSITORY, SYSIMPORT_CROSSWALK_REPOSITORY,
+        DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+    public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
+        SYSIMPORT_CROSSWALK_ENTITYREGISTRY, SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
+        DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+    private static Qualifier qualifier(
+        final String classid,
+        final String classname,
+        final String schemeid,
+        final String schemename) {
+        final Qualifier q = new Qualifier();
+        q.setClassid(classid);
+        q.setClassname(classname);
+        q.setSchemeid(schemeid);
+        q.setSchemename(schemename);
+        return q;
+    }
 }
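For context, a minimal sketch of how these shared constants are meant to be consumed by the mappers further down in this changeset. The Publication and Qualifier types come from dhp-schemas; the snippet itself is illustrative and not part of the commit:

```java
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Publication;

public class ModelConstantsUsageExample {

    public static void main(String[] args) {
        // a mapper no longer builds the Qualifier by hand: it reuses the shared default
        final Publication p = new Publication();
        p.setResulttype(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE);

        System.out.println(p.getResulttype().getClassid()); // "publication"
    }
}
```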

View File

@@ -7,6 +7,9 @@ import java.util.Objects;

 public abstract class Oaf implements Serializable {

+    /**
+     * The list of datasource id/name pairs providing this relationship.
+     */
     protected List<KeyValue> collectedfrom;

     private DataInfo dataInfo;

View File

@@ -7,16 +7,38 @@ import java.util.*;

 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+/**
+ * Relation models any edge between two nodes in the OpenAIRE graph. It has a source id and a target id pointing to
+ * graph node identifiers, and it is further characterised by the semantics of the link through the fields relType,
+ * subRelType and relClass. Provenance information is modeled according to the dataInfo element and collectedFrom,
+ * while individual relationship types can provide extra information via the properties field.
+ */
 public class Relation extends Oaf {

+    /**
+     * Main relationship classifier, values include 'resultResult', 'resultProject', 'resultOrganization', etc.
+     */
     private String relType;

+    /**
+     * Further classifies a relationship, values include 'affiliation', 'similarity', 'supplement', etc.
+     */
    private String subRelType;

+    /**
+     * Indicates the direction of the relationship, values include 'isSupplementTo', 'isSupplementedBy', 'merges',
+     * 'isMergedIn'.
+     */
     private String relClass;

+    /**
+     * The source entity id.
+     */
     private String source;

+    /**
+     * The target entity id.
+     */
     private String target;

     public String getRelType() {
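The javadoc above is easier to follow with a concrete edge. A minimal sketch, assuming the usual OpenAIRE convention of materialising each semantic link as a pair of directed relations; the identifiers are made up and the helper method is purely illustrative:

```java
import eu.dnetlib.dhp.schema.oaf.Relation;

public class RelationExample {

    public static void main(String[] args) {
        // "publication P is produced by project X" and its inverse edge
        final Relation forward = edge("50|doi_________::abc", "40|corda_______::xyz", "isProducedBy");
        final Relation inverse = edge("40|corda_______::xyz", "50|doi_________::abc", "produces");

        System.out.println(forward.getRelType() + "/" + forward.getSubRelType() + "/" + forward.getRelClass());
        System.out.println(inverse.getSource() + " -> " + inverse.getTarget());
    }

    private static Relation edge(String source, String target, String relClass) {
        final Relation r = new Relation();
        r.setRelType("resultProject"); // main classifier
        r.setSubRelType("outcome");    // finer classifier
        r.setRelClass(relClass);       // direction of the edge
        r.setSource(source);
        r.setTarget(target);
        return r;
    }
}
```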

View File

@@ -1,17 +1,21 @@

 package eu.dnetlib.dhp.collection;

+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
 import java.io.ByteArrayInputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;

 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;

@@ -23,6 +27,8 @@ import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -35,6 +41,8 @@ import eu.dnetlib.message.MessageType;

 public class GenerateNativeStoreSparkJob {

+    private static final Logger log = LoggerFactory.getLogger(GenerateNativeStoreSparkJob.class);
+
     public static MetadataRecord parseRecord(
         final String input,
         final String xpath,

@@ -78,24 +86,28 @@ public class GenerateNativeStoreSparkJob {
         final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
         final long dateOfCollection = new Long(parser.get("dateOfCollection"));

-        final SparkSession spark = SparkSession
-            .builder()
-            .appName("GenerateNativeStoreSparkJob")
-            .master(parser.get("master"))
-            .getOrCreate();
+        Boolean isSparkSessionManaged = Optional
+            .ofNullable(parser.get("isSparkSessionManaged"))
+            .map(Boolean::valueOf)
+            .orElse(Boolean.TRUE);
+        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

         final Map<String, String> ongoingMap = new HashMap<>();
         final Map<String, String> reportMap = new HashMap<>();

         final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));

-        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+        SparkConf conf = new SparkConf();
+        runWithSparkSession(
+            conf,
+            isSparkSessionManaged,
+            spark -> {
+                final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-        final JavaPairRDD<IntWritable, Text> inputRDD = sc
-            .sequenceFile(parser.get("input"), IntWritable.class, Text.class);
+                final JavaPairRDD<IntWritable, Text> inputRDD = sc
+                    .sequenceFile(parser.get("input"), IntWritable.class, Text.class);

-        final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems");
-        final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords");
+                final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems");
+                final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords");

-        final MessageManager manager = new MessageManager(
+                final MessageManager manager = new MessageManager(

@@ -157,5 +169,7 @@ public class GenerateNativeStoreSparkJob {
                         false);
                     manager.close();
                 }
+            });
     }
 }
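The switch from an explicitly built SparkSession to runWithSparkSession is the recurring pattern in this changeset. A rough sketch of what the helper is expected to do, based only on the call sites and the isSparkSessionManaged parameter description above (the real implementation lives in dhp-common's SparkSessionSupport and its functional interface may differ):

```java
import java.util.function.Consumer;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkSessionSupportSketch {

    // Assumed shape: obtain a session from the given conf, run the job body,
    // and stop the session only when the caller declares it "managed".
    public static void runWithSparkSession(
        SparkConf conf, Boolean isSparkSessionManaged, Consumer<SparkSession> fn) {
        final SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
        try {
            fn.accept(spark);
        } finally {
            // per the new isSparkSessionManaged parameter: when true, the session is stopped after the job body
            if (Boolean.TRUE.equals(isSparkSessionManaged)) {
                spark.stop();
            }
        }
    }
}
```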

View File

@@ -1,13 +1,17 @@

 package eu.dnetlib.dhp.transformation;

+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
 import java.io.ByteArrayInputStream;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;

 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;

@@ -17,8 +21,11 @@ import org.dom4j.Document;
 import org.dom4j.DocumentException;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.collection.GenerateNativeStoreSparkJob;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;
 import eu.dnetlib.dhp.transformation.vocabulary.VocabularyHelper;

@@ -29,6 +36,8 @@ import eu.dnetlib.message.MessageType;

 public class TransformSparkJobNode {

+    private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class);
+
     public static void main(String[] args) throws Exception {

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(

@@ -40,12 +49,18 @@ public class TransformSparkJobNode {

         parser.parseArgument(args);

+        Boolean isSparkSessionManaged = Optional
+            .ofNullable(parser.get("isSparkSessionManaged"))
+            .map(Boolean::valueOf)
+            .orElse(Boolean.TRUE);
+        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
         final String inputPath = parser.get("input");
         final String outputPath = parser.get("output");
         final String workflowId = parser.get("workflowId");
         final String trasformationRule = extractXSLTFromTR(
             Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule"))));
-        final String master = parser.get("master");

         final String rabbitUser = parser.get("rabbitUser");
         final String rabbitPassword = parser.get("rabbitPassword");
         final String rabbitHost = parser.get("rabbitHost");

@@ -53,12 +68,11 @@ public class TransformSparkJobNode {
         final long dateOfCollection = new Long(parser.get("dateOfCollection"));
         final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));

-        final SparkSession spark = SparkSession
-            .builder()
-            .appName("TransformStoreSparkJob")
-            .master(master)
-            .getOrCreate();
+        SparkConf conf = new SparkConf();
+        runWithSparkSession(
+            conf,
+            isSparkSessionManaged,
+            spark -> {
                 final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
                 final Dataset<MetadataRecord> mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder);
                 final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems");

@@ -82,7 +96,8 @@ public class TransformSparkJobNode {
                 reportMap.put("mdStoreSize", "" + transformedItems.value());
                 System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
                 if (!test) {
-                    final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false,
+                    final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false,
+                        false,
                         null);
                     manager
                         .sendMessage(

@@ -93,6 +108,8 @@ public class TransformSparkJobNode {
                     manager.close();
                 }
             }
+            });
     }

     private static String extractXSLTFromTR(final String tr) throws DocumentException {

View File

@@ -1,16 +1,86 @@
 [
-  {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true, the SparkSession is stopped after the job execution",
+    "paramRequired": false
+  },
   {
     "paramName": "e",
     "paramLongName": "encoding",
     "paramDescription": "the encoding of the input record, should be JSON or XML",
     "paramRequired": true
   },
   {
     "paramName": "d",
     "paramLongName": "dateOfCollection",
     "paramDescription": "the date when the record has been stored",
     "paramRequired": true
   },
   {
     "paramName": "p",
     "paramLongName": "provenance",
     "paramDescription": "the info about the provenance of the collected records",
     "paramRequired": true
   },
   {
     "paramName": "x",
     "paramLongName": "xpath",
     "paramDescription": "the xpath to identify the record identifier",
     "paramRequired": true
   },
   {
     "paramName": "i",
     "paramLongName": "input",
     "paramDescription": "the path of the sequential file to read",
     "paramRequired": true
   },
   {
     "paramName": "o",
     "paramLongName": "output",
     "paramDescription": "the path of the result DataFrame on HDFS",
     "paramRequired": true
   },
   {
     "paramName": "ru",
     "paramLongName": "rabbitUser",
     "paramDescription": "the user to connect with RabbitMq for messaging",
     "paramRequired": true
   },
   {
     "paramName": "rp",
     "paramLongName": "rabbitPassword",
     "paramDescription": "the password to connect with RabbitMq for messaging",
     "paramRequired": true
   },
   {
     "paramName": "rh",
     "paramLongName": "rabbitHost",
     "paramDescription": "the host of the RabbitMq server",
     "paramRequired": true
   },
   {
     "paramName": "ro",
     "paramLongName": "rabbitOngoingQueue",
     "paramDescription": "the name of the ongoing queue",
     "paramRequired": true
   },
   {
     "paramName": "rr",
     "paramLongName": "rabbitReportQueue",
     "paramDescription": "the name of the report queue",
     "paramRequired": true
   },
   {
     "paramName": "w",
     "paramLongName": "workflowId",
     "paramDescription": "the identifier of the dnet Workflow",
     "paramRequired": true
   },
   {
     "paramName": "t",
     "paramLongName": "isTest",
     "paramDescription": "flag marking a test run (no RabbitMq messages are sent)",
     "paramRequired": false
   }
 ]
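For reference, a minimal sketch of how such a parameter descriptor is typically consumed, assuming the ArgumentApplicationParser constructor takes the JSON descriptor as a string (the resource path below is hypothetical):

```java
import org.apache.commons.io.IOUtils;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class ParamSpecExample {

    public static void main(String[] args) throws Exception {
        // load the JSON descriptor from the classpath and let the parser validate the CLI arguments
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    ParamSpecExample.class
                        .getResourceAsStream("/eu/dnetlib/dhp/collection/collection_input_parameters.json")));

        parser.parseArgument(args);

        // long names declared in the descriptor become lookup keys
        final String input = parser.get("input");
        final boolean isSparkSessionManaged = Boolean.parseBoolean(parser.get("isSparkSessionManaged"));
        System.out.println(input + " / " + isSparkSessionManaged);
    }
}
```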

View File

@@ -1,16 +1,74 @@
 [
-  {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true, the SparkSession is stopped after the job execution",
+    "paramRequired": false
+  },
   {
     "paramName": "d",
     "paramLongName": "dateOfCollection",
     "paramDescription": "the date when the record has been stored",
     "paramRequired": true
   },
   {
     "paramName": "i",
     "paramLongName": "input",
     "paramDescription": "the path of the sequential file to read",
     "paramRequired": true
   },
   {
     "paramName": "o",
     "paramLongName": "output",
     "paramDescription": "the path of the result DataFrame on HDFS",
     "paramRequired": true
   },
   {
     "paramName": "w",
     "paramLongName": "workflowId",
     "paramDescription": "the identifier of the dnet Workflow",
     "paramRequired": true
   },
   {
     "paramName": "tr",
     "paramLongName": "transformationRule",
     "paramDescription": "the transformation Rule to apply to the input MDStore",
     "paramRequired": true
   },
   {
     "paramName": "ru",
     "paramLongName": "rabbitUser",
     "paramDescription": "the user to connect with RabbitMq for messaging",
     "paramRequired": true
   },
   {
     "paramName": "rp",
     "paramLongName": "rabbitPassword",
     "paramDescription": "the password to connect with RabbitMq for messaging",
     "paramRequired": true
   },
   {
     "paramName": "rh",
     "paramLongName": "rabbitHost",
     "paramDescription": "the host of the RabbitMq server",
     "paramRequired": true
   },
   {
     "paramName": "ro",
     "paramLongName": "rabbitOngoingQueue",
     "paramDescription": "the name of the ongoing queue",
     "paramRequired": true
   },
   {
     "paramName": "rr",
     "paramLongName": "rabbitReportQueue",
     "paramDescription": "the name of the report queue",
     "paramRequired": true
   },
   {
     "paramName": "t",
     "paramLongName": "isTest",
     "paramDescription": "flag marking a test run (no RabbitMq messages are sent)",
     "paramRequired": false
   }
 ]

View File

@@ -9,65 +9,60 @@ import java.nio.file.Path;

 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.io.TempDir;

 import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.model.mdstore.Provenance;
+import eu.dnetlib.dhp.schema.common.ModelSupport;

 public class CollectionJobTest {

-    private Path testDir;
+    private static SparkSession spark;

-    @BeforeEach
-    public void setup() throws IOException {
-        testDir = Files.createTempDirectory("dhp-collection");
-    }
+    @BeforeAll
+    public static void beforeAll() {
+        SparkConf conf = new SparkConf();
+        conf.setAppName(CollectionJobTest.class.getSimpleName());
+        conf.setMaster("local");
+        spark = SparkSession.builder().config(conf).getOrCreate();
+    }

-    @AfterEach
-    public void teadDown() throws IOException {
-        FileUtils.deleteDirectory(testDir.toFile());
-    }
+    @AfterAll
+    public static void afterAll() {
+        spark.stop();
+    }

     @Test
-    public void tesCollection() throws Exception {
+    public void tesCollection(@TempDir Path testDir) throws Exception {
         final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");

+        Assertions.assertNotNull(new ObjectMapper().writeValueAsString(provenance));
+
         GenerateNativeStoreSparkJob
             .main(
                 new String[] {
-                    "-mt", "local",
+                    "issm", "true",
                     "-w", "wid",
                     "-e", "XML",
                     "-d", "" + System.currentTimeMillis(),
                     "-p", new ObjectMapper().writeValueAsString(provenance),
                     "-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
                     "-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
                     "-o", testDir.toString() + "/store",
                     "-t", "true",
                     "-ru", "",
                     "-rp", "",
                     "-rh", "",
                     "-ro", "",
                     "-rr", ""
                 });

-        System.out.println(new ObjectMapper().writeValueAsString(provenance));
+        // TODO introduce useful assertions
     }

@@ -85,9 +80,8 @@ public class CollectionJobTest {
                 null,
                 null);

-        assert record != null;
-        System.out.println(record.getId());
-        System.out.println(record.getOriginalId());
+        assertNotNull(record.getId());
+        assertNotNull(record.getOriginalId());
     }

     @Test

@@ -112,10 +106,12 @@ public class CollectionJobTest {
                 System.currentTimeMillis(),
                 null,
                 null);
-        assert record != null;
+
         record.setBody("ciao");
-        assert record1 != null;
         record1.setBody("mondo");
+
+        assertNotNull(record);
+        assertNotNull(record1);
         assertEquals(record, record1);
     }
 }
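The collection test still ends on a TODO. One plausible way to fill it in would be to read the freshly written store back and check it is not empty; this is only a sketch, and it assumes the job persists the store as parquet, as the transform job's spark.read().format("parquet") suggests:

```java
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class NativeStoreAssertions {

    // Reads the store written by GenerateNativeStoreSparkJob and checks that at least one record was stored.
    public static void assertStoreNotEmpty(final SparkSession spark, final String storePath) {
        final Dataset<Row> store = spark.read().parquet(storePath);
        assertTrue(store.count() > 0, "the native store should not be empty");
    }
}
```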

View File

@@ -12,10 +12,14 @@ import java.util.Map;

 import javax.xml.transform.stream.StreamSource;

 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;

@@ -23,6 +27,7 @@ import org.junit.jupiter.api.io.TempDir;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;

+import eu.dnetlib.dhp.collection.CollectionJobTest;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.functions.Cleaner;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;

@@ -33,6 +38,21 @@ import net.sf.saxon.s9api.*;

 @ExtendWith(MockitoExtension.class)
 public class TransformationJobTest {

+    private static SparkSession spark;
+
+    @BeforeAll
+    public static void beforeAll() {
+        SparkConf conf = new SparkConf();
+        conf.setAppName(CollectionJobTest.class.getSimpleName());
+        conf.setMaster("local");
+        spark = SparkSession.builder().config(conf).getOrCreate();
+    }
+
+    @AfterAll
+    public static void afterAll() {
+        spark.stop();
+    }
+
     @Mock
     private LongAccumulator accumulator;

@@ -78,31 +98,21 @@ public class TransformationJobTest {
         TransformSparkJobNode
             .main(
                 new String[] {
-                    "-mt", "local",
+                    "-issm", "true",
                     "-i", mdstore_input,
                     "-o", mdstore_output,
                     "-d", "1",
                     "-w", "1",
                     "-tr", xslt,
                     "-t", "true",
                     "-ru", "",
                     "-rp", "",
                     "-rh", "",
                     "-ro", "",
                     "-rr", ""
                 });

+        // TODO introduce useful assertions
     }

     @Test

View File

@@ -0,0 +1,3 @@
# dhp-broker-events

dhp-broker-events is the DNET module responsible for producing the events for the OpenAIRE Broker Service.

View File

@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <parent>
    <artifactId>dhp-workflows</artifactId>
    <groupId>eu.dnetlib.dhp</groupId>
    <version>1.1.7-SNAPSHOT</version>
  </parent>
  <modelVersion>4.0.0</modelVersion>

  <artifactId>dhp-broker-events</artifactId>

  <dependencies>

    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.11</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.11</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_2.11</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>eu.dnetlib.dhp</groupId>
      <artifactId>dhp-common</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>eu.dnetlib.dhp</groupId>
      <artifactId>dhp-schemas</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>com.jayway.jsonpath</groupId>
      <artifactId>json-path</artifactId>
    </dependency>
    <dependency>
      <groupId>dom4j</groupId>
      <artifactId>dom4j</artifactId>
    </dependency>
    <dependency>
      <groupId>jaxen</groupId>
      <artifactId>jaxen</artifactId>
    </dependency>

    <dependency>
      <groupId>eu.dnetlib</groupId>
      <artifactId>dnet-openaire-broker-common</artifactId>
      <version>[1.0.0,2.0.0)</version>
    </dependency>

  </dependencies>

</project>

View File

@@ -75,12 +75,20 @@
         </configuration>
     </global>

-    <start to="CreateSimRel"/>
+    <start to="resetWorkingPath"/>

     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>

+    <action name="resetWorkingPath">
+        <fs>
+            <delete path="${workingPath}"/>
+        </fs>
+        <ok to="CreateSimRel"/>
+        <error to="Kill"/>
+    </action>
+
     <action name="CreateSimRel">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>

View File

@@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;

 import java.util.ArrayList;
 import java.util.Arrays;

@@ -24,7 +25,6 @@ import org.dom4j.DocumentFactory;
 import org.dom4j.DocumentHelper;
 import org.dom4j.Node;

-import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;

@@ -48,6 +48,21 @@ public abstract class AbstractMdRecordToOafMapper {

     protected final Map<String, String> code2name;

+    protected static final String DATACITE_SCHEMA_KERNEL_4 = "http://datacite.org/schema/kernel-4";
+    protected static final String DATACITE_SCHEMA_KERNEL_3 = "http://datacite.org/schema/kernel-3";
+
+    protected static final Map<String, String> nsContext = new HashMap<>();
+
+    static {
+        nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
+        nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
+        nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
+        nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
+        nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
+        nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
+        nsContext.put("datacite", DATACITE_SCHEMA_KERNEL_3);
+    }
+
     protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier(
         "main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");

@@ -57,31 +72,27 @@ public abstract class AbstractMdRecordToOafMapper {

     public List<Oaf> processMdRecord(final String xml) {
         try {
-            final Map<String, String> nsContext = new HashMap<>();
-            nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
-            nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
-            nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
-            nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
-            nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
-            nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
-            nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
             DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

             final Document doc = DocumentHelper
                 .parseText(
-                    xml
-                        .replaceAll(
-                            "http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
+                    xml.replaceAll(DATACITE_SCHEMA_KERNEL_4, DATACITE_SCHEMA_KERNEL_3));

             final String type = doc.valueOf("//dr:CobjCategory/@type");

-            final KeyValue collectedFrom = keyValue(
-                createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
-                doc.valueOf("//oaf:collectedFrom/@name"));
+            final KeyValue collectedFrom = getProvenanceDatasource(
+                doc, "//oaf:collectedFrom/@id", "//oaf:collectedFrom/@name");
+
+            if (collectedFrom == null) {
+                return null;
+            }

             final KeyValue hostedBy = StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
                 ? collectedFrom
-                : keyValue(
-                    createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
-                    doc.valueOf("//oaf:hostedBy/@name"));
+                : getProvenanceDatasource(doc, "//oaf:hostedBy/@id", "//oaf:hostedBy/@name");
+
+            if (hostedBy == null) {
+                return null;
+            }

             final DataInfo info = prepareDataInfo(doc);
             final long lastUpdateTimestamp = new Date().getTime();

@@ -92,6 +103,19 @@ public abstract class AbstractMdRecordToOafMapper {
         }
     }

+    private KeyValue getProvenanceDatasource(Document doc, String xpathId, String xpathName) {
+        final String dsId = doc.valueOf(xpathId);
+        final String dsName = doc.valueOf(xpathName);
+
+        if (StringUtils.isBlank(dsId) || StringUtils.isBlank(dsName)) {
+            return null;
+        }
+
+        return keyValue(
+            createOpenaireId(10, dsId, true),
+            dsName);
+    }
+
     protected List<Oaf> createOafs(
         final Document doc,
         final String type,

@@ -107,14 +131,14 @@ public abstract class AbstractMdRecordToOafMapper {
             case "publication":
                 final Publication p = new Publication();
                 populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
+                p.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
                 p.setJournal(prepareJournal(doc, info));
                 oafs.add(p);
                 break;
             case "dataset":
                 final Dataset d = new Dataset();
                 populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
+                d.setResulttype(DATASET_DEFAULT_RESULTTYPE);
                 d.setStoragedate(prepareDatasetStorageDate(doc, info));
                 d.setDevice(prepareDatasetDevice(doc, info));
                 d.setSize(prepareDatasetSize(doc, info));

@@ -127,7 +151,7 @@ public abstract class AbstractMdRecordToOafMapper {
             case "software":
                 final Software s = new Software();
                 populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
+                s.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
                 s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
                 s.setLicense(prepareSoftwareLicenses(doc, info));
                 s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));

@@ -138,7 +162,7 @@ public abstract class AbstractMdRecordToOafMapper {
             default:
                 final OtherResearchProduct o = new OtherResearchProduct();
                 populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
+                o.setResulttype(ORP_DEFAULT_RESULTTYPE);
                 o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
                 o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
                 o.setTool(prepareOtherResearchProductTools(doc, info));

@@ -171,33 +195,36 @@ public abstract class AbstractMdRecordToOafMapper {
             if (StringUtils.isNotBlank(originalId)) {
                 final String projectId = createOpenaireId(40, originalId, true);

-                final Relation r1 = new Relation();
-                r1.setRelType("resultProject");
-                r1.setSubRelType("outcome");
-                r1.setRelClass("isProducedBy");
-                r1.setSource(docId);
-                r1.setTarget(projectId);
-                r1.setCollectedfrom(Arrays.asList(collectedFrom));
-                r1.setDataInfo(info);
-                r1.setLastupdatetimestamp(lastUpdateTimestamp);
-                res.add(r1);
-
-                final Relation r2 = new Relation();
-                r2.setRelType("resultProject");
-                r2.setSubRelType("outcome");
-                r2.setRelClass("produces");
-                r2.setSource(projectId);
-                r2.setTarget(docId);
-                r2.setCollectedfrom(Arrays.asList(collectedFrom));
-                r2.setDataInfo(info);
-                r2.setLastupdatetimestamp(lastUpdateTimestamp);
-                res.add(r2);
+                res
+                    .add(
+                        getRelation(
+                            docId, projectId, RESULT_PROJECT, OUTCOME, IS_PRODUCED_BY, collectedFrom, info,
+                            lastUpdateTimestamp));
+                res
+                    .add(
+                        getRelation(
+                            projectId, docId, RESULT_PROJECT, OUTCOME, PRODUCES, collectedFrom, info,
+                            lastUpdateTimestamp));
             }
         }

         return res;
     }

+    protected Relation getRelation(String source, String target, String relType, String subRelType, String relClass,
+        KeyValue collectedFrom, DataInfo info, long lastUpdateTimestamp) {
+        final Relation rel = new Relation();
+        rel.setRelType(relType);
+        rel.setSubRelType(subRelType);
+        rel.setRelClass(relClass);
+        rel.setSource(source);
+        rel.setTarget(target);
+        rel.setCollectedfrom(Arrays.asList(collectedFrom));
+        rel.setDataInfo(info);
+        rel.setLastupdatetimestamp(lastUpdateTimestamp);
+        return rel;
+    }
+
     protected abstract List<Oaf> addOtherResultRels(
         final Document doc,
         final KeyValue collectedFrom,

@@ -423,7 +450,7 @@ public abstract class AbstractMdRecordToOafMapper {
         if (n == null) {
             return dataInfo(
-                false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9");
+                false, null, false, false, REPOSITORY_PROVENANCE_ACTIONS, "0.9");
         }

         final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
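The namespace map moved into a static field; the way dom4j consumes it is easiest to see in isolation. A small self-contained sketch, using only one of the prefixes registered above (the sample record is invented):

```java
import java.util.HashMap;
import java.util.Map;

import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.DocumentHelper;

public class NamespaceXPathExample {

    public static void main(String[] args) throws Exception {
        final Map<String, String> nsContext = new HashMap<>();
        nsContext.put("dc", "http://purl.org/dc/elements/1.1/");

        // dom4j resolves the "dc:" prefix in XPath expressions through this global map
        DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

        final Document doc = DocumentHelper
            .parseText(
                "<record xmlns:dc=\"http://purl.org/dc/elements/1.1/\"><dc:title>A title</dc:title></record>");

        System.out.println(doc.valueOf("//dc:title")); // prints "A title"
    }
}
```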

View File

@@ -95,6 +95,7 @@ public class GenerateEntitiesApplication {
                 .sequenceFile(sp, Text.class, Text.class)
                 .map(k -> new Tuple2<>(k._1().toString(), k._2().toString()))
                 .map(k -> convertToListOaf(k._1(), k._2(), code2name))
+                .filter(Objects::nonNull)
                 .flatMap(list -> list.iterator()));
     }

View File

@@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;

 import java.io.Closeable;
 import java.io.IOException;

@@ -31,7 +32,6 @@ import org.apache.commons.logging.LogFactory;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
-import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Dataset;

@@ -55,6 +55,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication

     private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class);

+    public static final String SOURCE_TYPE = "source_type";
+    public static final String TARGET_TYPE = "target_type";
+
     private final DbClient dbClient;

     private final long lastUpdateTimestamp;

@@ -304,9 +307,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
                 createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

             final Relation r1 = new Relation();
-            r1.setRelType("datasourceOrganization");
-            r1.setSubRelType("provision");
-            r1.setRelClass("isProvidedBy");
+            r1.setRelType(DATASOURCE_ORGANIZATION);
+            r1.setSubRelType(PROVISION);
+            r1.setRelClass(IS_PROVIDED_BY);
             r1.setSource(dsId);
             r1.setTarget(orgId);
             r1.setCollectedfrom(collectedFrom);

@@ -314,9 +317,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             r1.setLastupdatetimestamp(lastUpdateTimestamp);

             final Relation r2 = new Relation();
-            r2.setRelType("datasourceOrganization");
-            r2.setSubRelType("provision");
-            r2.setRelClass("provides");
+            r2.setRelType(DATASOURCE_ORGANIZATION);
+            r2.setSubRelType(PROVISION);
+            r2.setRelClass(PROVIDES);
             r2.setSource(orgId);
             r2.setTarget(dsId);
             r2.setCollectedfrom(collectedFrom);

@@ -338,9 +341,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
                 createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

             final Relation r1 = new Relation();
-            r1.setRelType("projectOrganization");
-            r1.setSubRelType("participation");
-            r1.setRelClass("hasParticipant");
+            r1.setRelType(PROJECT_ORGANIZATION);
+            r1.setSubRelType(PARTICIPATION);
+            r1.setRelClass(HAS_PARTICIPANT);
             r1.setSource(projectId);
             r1.setTarget(orgId);
             r1.setCollectedfrom(collectedFrom);

@@ -348,9 +351,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             r1.setLastupdatetimestamp(lastUpdateTimestamp);

             final Relation r2 = new Relation();
-            r2.setRelType("projectOrganization");
-            r2.setSubRelType("participation");
-            r2.setRelClass("isParticipant");
+            r2.setRelType(PROJECT_ORGANIZATION);
+            r2.setSubRelType(PARTICIPATION);
+            r2.setRelClass(IS_PARTICIPANT);
             r2.setSource(orgId);
             r2.setTarget(projectId);
             r2.setCollectedfrom(collectedFrom);

@@ -367,28 +370,30 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             final DataInfo info = dataInfo(
                 false, null, false, false,
-                qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9");
+                qualifier(USER_CLAIM, USER_CLAIM, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), "0.9");

             final List<KeyValue> collectedFrom = listKeyValues(
                 createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");

             try {
-                if (rs.getString("source_type").equals("context")) {
+                if (rs.getString(SOURCE_TYPE).equals("context")) {
                     final Result r;
-                    if (rs.getString("target_type").equals("dataset")) {
+
+                    if (rs.getString(TARGET_TYPE).equals("dataset")) {
                         r = new Dataset();
-                        r.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
-                    } else if (rs.getString("target_type").equals("software")) {
+                        r.setResulttype(DATASET_DEFAULT_RESULTTYPE);
+                    } else if (rs.getString(TARGET_TYPE).equals("software")) {
                         r = new Software();
-                        r.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
-                    } else if (rs.getString("target_type").equals("other")) {
+                        r.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
+                    } else if (rs.getString(TARGET_TYPE).equals("other")) {
                         r = new OtherResearchProduct();
-                        r.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
+                        r.setResulttype(ORP_DEFAULT_RESULTTYPE);
                     } else {
                         r = new Publication();
-                        r.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
+                        r.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
                     }
                     r.setId(createOpenaireId(50, rs.getString("target_id"), false));
                     r.setLastupdatetimestamp(lastUpdateTimestamp);

@@ -398,32 +403,32 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
                     return Arrays.asList(r);
                 } else {
-                    final String sourceId = createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
-                    final String targetId = createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);
+                    final String sourceId = createOpenaireId(rs.getString(SOURCE_TYPE), rs.getString("source_id"), false);
+                    final String targetId = createOpenaireId(rs.getString(TARGET_TYPE), rs.getString("target_id"), false);

                     final Relation r1 = new Relation();
                     final Relation r2 = new Relation();

-                    if (rs.getString("source_type").equals("project")) {
+                    if (rs.getString(SOURCE_TYPE).equals("project")) {
                         r1.setCollectedfrom(collectedFrom);
-                        r1.setRelType("resultProject");
-                        r1.setSubRelType("outcome");
-                        r1.setRelClass("produces");
+                        r1.setRelType(RESULT_PROJECT);
+                        r1.setSubRelType(OUTCOME);
+                        r1.setRelClass(PRODUCES);

                         r2.setCollectedfrom(collectedFrom);
-                        r2.setRelType("resultProject");
-                        r2.setSubRelType("outcome");
-                        r2.setRelClass("isProducedBy");
+                        r2.setRelType(RESULT_PROJECT);
+                        r2.setSubRelType(OUTCOME);
+                        r2.setRelClass(IS_PRODUCED_BY);
                     } else {
                         r1.setCollectedfrom(collectedFrom);
-                        r1.setRelType("resultResult");
-                        r1.setSubRelType("relationship");
-                        r1.setRelClass("isRelatedTo");
+                        r1.setRelType(RESULT_RESULT);
+                        r1.setSubRelType(RELATIONSHIP);
+                        r1.setRelClass(IS_RELATED_TO);

                         r2.setCollectedfrom(collectedFrom);
-                        r2.setRelType("resultResult");
-                        r2.setSubRelType("relationship");
-                        r2.setRelClass("isRelatedTo");
+                        r2.setRelType(RESULT_RESULT);
+                        r2.setSubRelType(RELATIONSHIP);
+                        r2.setRelClass(IS_RELATED_TO);
                     }

                     r1.setSource(sourceId);

@@ -457,8 +462,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         final Boolean inferred = rs.getBoolean("inferred");
         final String trust = rs.getString("trust");
+
         return dataInfo(
-            deletedbyinference, inferenceprovenance, inferred, false,
-            MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust);
+            deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust);
     }

     private Qualifier prepareQualifierSplitting(final String s) {

@@ -514,9 +520,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         if (arr.length == 3) {
             final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0].trim() : null;
             final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null;
-            ;
             final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;
-            ;
+
             if (issn != null || eissn != null || lissn != null) {
                 return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
             }

View File

@ -3,6 +3,7 @@ package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -13,17 +14,10 @@ import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document; import org.dom4j.Document;
import org.dom4j.Node; import org.dom4j.Node;
import com.google.common.collect.Lists;
import eu.dnetlib.dhp.oa.graph.raw.common.PacePerson; import eu.dnetlib.dhp.oa.graph.raw.common.PacePerson;
import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.GeoLocation;
import eu.dnetlib.dhp.schema.oaf.Instance;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
public class OafToOafMapper extends AbstractMdRecordToOafMapper { public class OafToOafMapper extends AbstractMdRecordToOafMapper {
@ -52,7 +46,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
@Override @Override
protected Qualifier prepareLanguages(final Document doc) { protected Qualifier prepareLanguages(final Document doc) {
return prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages"); return prepareQualifier(doc, "//dc:language", DNET_LANGUAGES, DNET_LANGUAGES);
} }
@Override @Override
@ -96,26 +90,22 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
final DataInfo info, final DataInfo info,
final KeyValue collectedfrom, final KeyValue collectedfrom,
final KeyValue hostedby) { final KeyValue hostedby) {
final List<Instance> res = new ArrayList<>();
for (final Object o : doc.selectNodes("//dc:identifier")) {
final String url = ((Node) o).getText().trim();
if (url.startsWith("http")) {
final Instance instance = new Instance(); final Instance instance = new Instance();
instance.setUrl(Arrays.asList(url));
instance instance
.setInstancetype( .setInstancetype(
prepareQualifier( prepareQualifier(
doc, doc,
"//dr:CobjCategory", "//dr:CobjCategory",
"dnet:publication_resource", DNET_PUBLICATION_RESOURCE,
"dnet:publication_resource")); DNET_PUBLICATION_RESOURCE));
instance.setCollectedfrom(collectedfrom); instance.setCollectedfrom(collectedfrom);
instance.setHostedby(hostedby); instance.setHostedby(hostedby);
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info)); instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation")); instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
instance instance
.setAccessright( .setAccessright(
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes")); prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
instance.setLicense(field(doc.valueOf("//oaf:license"), info)); instance.setLicense(field(doc.valueOf("//oaf:license"), info));
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
instance instance
@ -124,10 +114,14 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
instance instance
.setProcessingchargecurrency( .setProcessingchargecurrency(
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info)); field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
res.add(instance);
for (final Object o : doc.selectNodes("//dc:identifier")) {
final String url = ((Node) o).getText().trim();
if (url.startsWith("http")) {
instance.setUrl(Arrays.asList(url));
} }
} }
return res; return Lists.newArrayList(instance);
} }
@Override @Override
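
In the reworked prepareInstances above, a single Instance is built once and instance.setUrl(Arrays.asList(url)) is called for every //dc:identifier starting with "http", so each match replaces the previous one-element list. As a small sketch only, an accumulating variant under the same dom4j Document (the class and method names are not part of the diff, and the dc prefix is assumed to be bound in the XPath context as it is for the mapper's records):

    import java.util.ArrayList;
    import java.util.List;

    import org.dom4j.Document;
    import org.dom4j.Node;

    public class InstanceUrlSketch {

        // Collects every dc:identifier that looks like a URL instead of keeping only the last match.
        static List<String> httpIdentifiers(final Document doc) {
            final List<String> urls = new ArrayList<>();
            for (final Object o : doc.selectNodes("//dc:identifier")) {
                final String url = ((Node) o).getText().trim();
                if (url.startsWith("http")) {
                    urls.add(url);
                }
            }
            return urls; // usage: instance.setUrl(httpIdentifiers(doc));
        }
    }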
@ -241,27 +235,16 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
final String otherId = createOpenaireId(50, originalId, false); final String otherId = createOpenaireId(50, originalId, false);
final Relation r1 = new Relation(); res
r1.setRelType("resultResult"); .add(
r1.setSubRelType("publicationDataset"); getRelation(
r1.setRelClass("isRelatedTo"); docId, otherId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
r1.setSource(docId); lastUpdateTimestamp));
r1.setTarget(otherId); res
r1.setCollectedfrom(Arrays.asList(collectedFrom)); .add(
r1.setDataInfo(info); getRelation(
r1.setLastupdatetimestamp(lastUpdateTimestamp); otherId, docId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
res.add(r1); lastUpdateTimestamp));
final Relation r2 = new Relation();
r2.setRelType("resultResult");
r2.setSubRelType("publicationDataset");
r2.setRelClass("isRelatedTo");
r2.setSource(otherId);
r2.setTarget(docId);
r2.setCollectedfrom(Arrays.asList(collectedFrom));
r2.setDataInfo(info);
r2.setLastupdatetimestamp(lastUpdateTimestamp);
res.add(r2);
} }
} }
return res; return res;
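
The hunk above replaces the two hand-assembled Relation objects with calls to a getRelation(...) helper; the OdfToOafMapper changes further down do the same and drop its private prepareOtherResultRel. The helper itself lives in AbstractMdRecordToOafMapper and is not shown in this diff, so the following is only a plausible reconstruction from the removed setter sequence and the argument order at the call sites; the real method may differ.

    import java.util.Arrays;

    import eu.dnetlib.dhp.schema.oaf.DataInfo;
    import eu.dnetlib.dhp.schema.oaf.KeyValue;
    import eu.dnetlib.dhp.schema.oaf.Relation;

    public class RelationFactorySketch {

        static Relation getRelation(
            final String source, final String target,
            final String relType, final String subRelType, final String relClass,
            final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) {
            final Relation rel = new Relation();
            rel.setRelType(relType);       // e.g. RESULT_RESULT
            rel.setSubRelType(subRelType); // e.g. PUBLICATION_DATASET
            rel.setRelClass(relClass);     // e.g. IS_RELATED_TO
            rel.setSource(source);
            rel.setTarget(target);
            rel.setCollectedfrom(Arrays.asList(collectedFrom));
            rel.setDataInfo(info);
            rel.setLastupdatetimestamp(lastUpdateTimestamp);
            return rel;
        }
    }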

View File

@ -4,6 +4,7 @@ package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -14,6 +15,7 @@ import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document; import org.dom4j.Document;
import org.dom4j.Node; import org.dom4j.Node;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field; import eu.dnetlib.dhp.schema.oaf.Field;
@ -27,6 +29,8 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
public class OdfToOafMapper extends AbstractMdRecordToOafMapper { public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
public static final String HTTP_DX_DOI_PREIFX = "http://dx.doi.org/";
public OdfToOafMapper(final Map<String, String> code2name) { public OdfToOafMapper(final Map<String, String> code2name) {
super(code2name); super(code2name);
} }
@ -62,7 +66,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
structuredProperty( structuredProperty(
((Node) o).getText(), ((Node) o).getText(),
prepareQualifier( prepareQualifier(
(Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"), (Node) o, "./@nameIdentifierScheme", DNET_PID_TYPES, DNET_PID_TYPES),
info)); info));
} }
return res; return res;
@ -80,14 +84,14 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
instance instance
.setInstancetype( .setInstancetype(
prepareQualifier( prepareQualifier(
doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource")); doc, "//dr:CobjCategory", DNET_PUBLICATION_RESOURCE, DNET_PUBLICATION_RESOURCE));
instance.setCollectedfrom(collectedfrom); instance.setCollectedfrom(collectedfrom);
instance.setHostedby(hostedby); instance.setHostedby(hostedby);
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info)); instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation")); instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
instance instance
.setAccessright( .setAccessright(
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes")); prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
instance.setLicense(field(doc.valueOf("//oaf:license"), info)); instance.setLicense(field(doc.valueOf("//oaf:license"), info));
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info)); instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info));
@ -102,10 +106,10 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
instance.getUrl().add(((Node) o).getText().trim()); instance.getUrl().add(((Node) o).getText().trim());
} }
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) { for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim()); instance.getUrl().add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
} }
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) { for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) {
instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim()); instance.getUrl().add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
} }
return Arrays.asList(instance); return Arrays.asList(instance);
} }
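
The hunk above folds the repeated "http://dx.doi.org/" literal into the new HTTP_DX_DOI_PREIFX constant (the name is copied here verbatim, spelling included). A minimal sketch of that DOI-to-URL expansion, assuming the datacite prefix is bound in the document's XPath context as it is for the mapper; the wrapper class is illustrative only.

    import java.util.ArrayList;
    import java.util.List;

    import org.dom4j.Document;
    import org.dom4j.Node;

    public class DoiUrlSketch {

        public static final String HTTP_DX_DOI_PREIFX = "http://dx.doi.org/"; // name as in the commit

        // Mirrors the two loops in the hunk: every DOI identifier becomes a resolvable URL.
        static List<String> doiUrls(final Document doc) {
            final List<String> urls = new ArrayList<>();
            for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
                urls.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
            }
            for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) {
                urls.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
            }
            return urls;
        }
    }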
@ -131,8 +135,8 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
((Node) o).getText(), ((Node) o).getText(),
"UNKNOWN", "UNKNOWN",
"UNKNOWN", "UNKNOWN",
"dnet:dataCite_date", DNET_DATA_CITE_DATE,
"dnet:dataCite_date", DNET_DATA_CITE_DATE,
info)); info));
} }
} }
@ -171,7 +175,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
@Override @Override
protected Qualifier prepareLanguages(final Document doc) { protected Qualifier prepareLanguages(final Document doc) {
return prepareQualifier(doc, "//datacite:language", "dnet:languages", "dnet:languages"); return prepareQualifier(doc, "//datacite:language", DNET_LANGUAGES, DNET_LANGUAGES);
} }
@Override @Override
@ -292,36 +296,29 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
final String otherId = createOpenaireId(50, originalId, false); final String otherId = createOpenaireId(50, originalId, false);
final String type = ((Node) o).valueOf("@relationType"); final String type = ((Node) o).valueOf("@relationType");
if (type.equals("IsSupplementTo")) { if (type.equalsIgnoreCase("IsSupplementTo")) {
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, docId, otherId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENT_TO, collectedFrom, info,
info, lastUpdateTimestamp));
lastUpdateTimestamp,
docId,
otherId,
"supplement",
"isSupplementTo"));
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, otherId, docId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENTED_BY, collectedFrom, info,
info, lastUpdateTimestamp));
lastUpdateTimestamp,
otherId,
docId,
"supplement",
"isSupplementedBy"));
} else if (type.equals("IsPartOf")) { } else if (type.equals("IsPartOf")) {
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf")); docId, otherId, RESULT_RESULT, PART, IS_PART_OF, collectedFrom, info,
lastUpdateTimestamp));
res res
.add( .add(
prepareOtherResultRel( getRelation(
collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts")); otherId, docId, RESULT_RESULT, PART, HAS_PARTS, collectedFrom, info,
lastUpdateTimestamp));
} else { } else {
} }
} }
@ -329,32 +326,12 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
return res; return res;
} }
private Relation prepareOtherResultRel(
final KeyValue collectedFrom,
final DataInfo info,
final long lastUpdateTimestamp,
final String source,
final String target,
final String subRelType,
final String relClass) {
final Relation r = new Relation();
r.setRelType("resultResult");
r.setSubRelType(subRelType);
r.setRelClass(relClass);
r.setSource(source);
r.setTarget(target);
r.setCollectedfrom(Arrays.asList(collectedFrom));
r.setDataInfo(info);
r.setLastupdatetimestamp(lastUpdateTimestamp);
return r;
}
@Override @Override
protected Qualifier prepareResourceType(final Document doc, final DataInfo info) { protected Qualifier prepareResourceType(final Document doc, final DataInfo info) {
return prepareQualifier( return prepareQualifier(
doc, doc,
"//*[local-name() = 'resource']//*[local-name() = 'resourceType']", "//*[local-name() = 'resource']//*[local-name() = 'resourceType']",
"dnet:dataCite_resource", DNET_DATA_CITE_RESOURCE,
"dnet:dataCite_resource"); DNET_DATA_CITE_RESOURCE);
} }
} }

View File

@ -1,27 +0,0 @@
package eu.dnetlib.dhp.oa.graph.raw.common;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
public class MigrationConstants {
public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = qualifier(
"publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier DATASET_RESULTTYPE_QUALIFIER = qualifier(
"dataset", "dataset",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier(
"software", "software",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier(
"other", "other",
"dnet:result_typologies", "dnet:result_typologies");
public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
"dnet:provenanceActions", "dnet:provenanceActions");
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
"dnet:provenanceActions", "dnet:provenanceActions");
}
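
The MigrationConstants class deleted above duplicated vocabulary strings that this commit centralizes as named constants in eu.dnetlib.dhp.schema.common.ModelConstants (imported statically at the top of both mappers). Purely as an illustration of the replacement pattern, with constant names assumed to mirror the removed string literals rather than quoted from ModelConstants itself:

    import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;

    import eu.dnetlib.dhp.schema.oaf.Qualifier;

    public class ProvenanceQualifierSketch {

        // Assumed names; the literals match the deleted file above.
        static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
        static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";

        // Same Qualifier as the removed REPOSITORY_PROVENANCE_ACTIONS, rebuilt from named constants.
        static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
            SYSIMPORT_CROSSWALK_REPOSITORY, SYSIMPORT_CROSSWALK_REPOSITORY,
            DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
    }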

View File

@ -115,11 +115,11 @@
<delete path="${contentPath}/db_claims"/> <delete path="${contentPath}/db_claims"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${contentPath}/db_claims</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>-a</arg><arg>claims</arg> <arg>--action</arg><arg>claims</arg>
</java> </java>
<ok to="ImportODF_claims"/> <ok to="ImportODF_claims"/>
<error to="Kill"/> <error to="Kill"/>
@ -165,10 +165,10 @@
<delete path="${contentPath}/db_records"/> <delete path="${contentPath}/db_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${contentPath}/db_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</java> </java>
<ok to="ImportODF"/> <ok to="ImportODF"/>
<error to="Kill"/> <error to="Kill"/>
@ -180,12 +180,12 @@
<delete path="${contentPath}/odf_records"/> <delete path="${contentPath}/odf_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/odf_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg> <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg> <arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg> <arg>--mdFormat</arg><arg>ODF</arg>
<arg>-l</arg><arg>store</arg> <arg>--mdLayout</arg><arg>store</arg>
<arg>-i</arg><arg>cleaned</arg> <arg>--mdInterpretation</arg><arg>cleaned</arg>
</java> </java>
<ok to="ImportOAF"/> <ok to="ImportOAF"/>
<error to="Kill"/> <error to="Kill"/>
@ -197,12 +197,12 @@
<delete path="${contentPath}/oaf_records"/> <delete path="${contentPath}/oaf_records"/>
</prepare> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/oaf_records</arg> <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg> <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg> <arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg> <arg>--mdFormat</arg><arg>OAF</arg>
<arg>-l</arg><arg>store</arg> <arg>--mdLayout</arg><arg>store</arg>
<arg>-i</arg><arg>cleaned</arg> <arg>--mdInterpretation</arg><arg>cleaned</arg>
</java> </java>
<ok to="wait_import"/> <ok to="wait_import"/>
<error to="Kill"/> <error to="Kill"/>
@ -231,11 +231,11 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg> <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
<arg>-t</arg><arg>${workingDir}/entities_claim</arg> <arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</spark> </spark>
<ok to="GenerateGraph_claims"/> <ok to="GenerateGraph_claims"/>
<error to="Kill"/> <error to="Kill"/>
@ -257,8 +257,8 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${workingDir}/entities_claim</arg> <arg>--sourcePath</arg><arg>${workingDir}/entities_claim</arg>
<arg>-g</arg><arg>${workingDir}/graph_claims</arg> <arg>--graphRawPath</arg><arg>${workingDir}/graph_claims</arg>
</spark> </spark>
<ok to="wait_graphs"/> <ok to="wait_graphs"/>
<error to="Kill"/> <error to="Kill"/>
@ -280,11 +280,11 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg> <arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
<arg>-t</arg><arg>${workingDir}/entities</arg> <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</spark> </spark>
<ok to="GenerateGraph"/> <ok to="GenerateGraph"/>
<error to="Kill"/> <error to="Kill"/>
@ -307,8 +307,8 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=7680 --conf spark.sql.shuffle.partitions=7680
</spark-opts> </spark-opts>
<arg>-s</arg><arg>${workingDir}/entities</arg> <arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
<arg>-g</arg><arg>${workingDir}/graph_raw</arg> <arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
</spark> </spark>
<ok to="wait_graphs"/> <ok to="wait_graphs"/>
<error to="Kill"/> <error to="Kill"/>
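
Every Java and Spark action in the workflow above now passes self-describing long options (--hdfsPath, --postgresUrl, ...) instead of the old short flags. The applications' actual argument parser is not part of this diff; the following is only a hypothetical sketch of turning such --name value pairs into a lookup map, with made-up example values.

    import java.util.HashMap;
    import java.util.Map;

    public class LongOptionSketch {

        // Hypothetical helper: ["--hdfsPath", "/tmp/db_records", ...] -> {hdfsPath=/tmp/db_records, ...}.
        static Map<String, String> toMap(final String... args) {
            final Map<String, String> params = new HashMap<>();
            for (int i = 0; i + 1 < args.length; i += 2) {
                if (!args[i].startsWith("--")) {
                    throw new IllegalArgumentException("expected a --option at position " + i + ": " + args[i]);
                }
                params.put(args[i].substring(2), args[i + 1]);
            }
            return params;
        }

        public static void main(final String[] args) {
            // Example mirroring the ImportDB action arguments above (values are placeholders).
            final Map<String, String> params = toMap(
                "--hdfsPath", "/tmp/content/db_records",
                "--postgresUrl", "jdbc:postgresql://example:5432/dnet",
                "--postgresUser", "dnet",
                "--postgresPassword", "***");
            System.out.println(params.get("hdfsPath"));
        }
    }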

View File

@ -1,8 +1,8 @@
<workflow-app name="import db entities (step 1)" xmlns="uri:oozie:workflow:0.5"> <workflow-app name="import DB entities" xmlns="uri:oozie:workflow:0.5">
<parameters> <parameters>
<property> <property>
<name>migrationPathStep1</name> <name>contentPath</name>
<description>the base path to store hdfs file</description> <description>path location to store (or reuse) content from the aggregator</description>
</property> </property>
<property> <property>
<name>postgresURL</name> <name>postgresURL</name>
@ -16,6 +16,7 @@
<name>postgresPassword</name> <name>postgresPassword</name>
<description>the password postgres</description> <description>the password postgres</description>
</property> </property>
<property> <property>
<name>sparkDriverMemory</name> <name>sparkDriverMemory</name>
<description>memory for driver process</description> <description>memory for driver process</description>
@ -28,31 +29,81 @@
<name>sparkExecutorCores</name> <name>sparkExecutorCores</name>
<description>number of cores used by single executor</description> <description>number of cores used by single executor</description>
</property> </property>
<property>
<name>oozieActionShareLibForSpark2</name>
<description>oozie action sharelib for spark 2.*</description>
</property>
<property>
<name>spark2ExtraListeners</name>
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
<description>spark 2.* extra listeners classname</description>
</property>
<property>
<name>spark2SqlQueryExecutionListeners</name>
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
<description>spark 2.* sql query execution listeners classname</description>
</property>
<property>
<name>spark2YarnHistoryServerAddress</name>
<description>spark 2.* yarn history server address</description>
</property>
<property>
<name>spark2EventLogDir</name>
<description>spark 2.* event log dir location</description>
</property>
</parameters> </parameters>
<start to="ResetWorkingPath"/> <global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>${oozieActionShareLibForSpark2}</value>
</property>
</configuration>
</global>
<start to="ImportDB"/>
<kill name="Kill"> <kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill> </kill>
<action name="ResetWorkingPath"> <action name="ImportDB">
<fs> <java>
<delete path='${migrationPathStep1}/db_records'/> <prepare>
</fs> <delete path="${contentPath}/db_records"/>
<ok to="ImportDB"/> </prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</java>
<ok to="ImportDB_claims"/>
<error to="Kill"/> <error to="Kill"/>
</action> </action>
<action name="ImportDB"> <action name="ImportDB_claims">
<java> <java>
<job-tracker>${jobTracker}</job-tracker> <prepare>
<name-node>${nameNode}</name-node> <delete path="${contentPath}/db_claims"/>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class> </prepare>
<arg>-p</arg><arg>${migrationPathStep1}/db_records</arg> <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>-pgurl</arg><arg>${postgresURL}</arg> <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg> <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg> <arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>--action</arg><arg>claims</arg>
</java> </java>
<ok to="End"/> <ok to="End"/>
<error to="Kill"/> <error to="Kill"/>

View File

@ -28,6 +28,7 @@
<module>dhp-dedup-scholexplorer</module> <module>dhp-dedup-scholexplorer</module>
<module>dhp-graph-provision-scholexplorer</module> <module>dhp-graph-provision-scholexplorer</module>
<module>dhp-stats-update</module> <module>dhp-stats-update</module>
<module>dhp-broker-events</module>
</modules> </modules>
<pluginRepositories> <pluginRepositories>