Merge remote-tracking branch 'origin/master' into doiboost

Sandro La Bruzzo 2020-04-22 15:00:57 +02:00
commit bb6c9785b4
13 changed files with 1140 additions and 1211 deletions

View File

@@ -11,8 +11,6 @@ import eu.dnetlib.pace.config.DedupConfig;
 import java.io.IOException;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
 import org.dom4j.DocumentException;
@@ -72,12 +70,9 @@ public class SparkCreateDedupRecord extends AbstractSparkAction {
         Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));

         DedupRecordFactory.createDedupRecord(spark, mergeRelPath, entityPath, clazz)
-                .map(
-                        (MapFunction<OafEntity, String>)
-                                value -> OBJECT_MAPPER.writeValueAsString(value),
-                        Encoders.STRING())
                 .write()
                 .mode(SaveMode.Overwrite)
+                .option("compression", "gzip")
                 .json(outputPath);
     }
 }
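
Side note (not part of the commit): the simplified write path leans on Spark's built-in JSON data source, which serializes each Dataset row itself and applies the requested codec, so the explicit OBJECT_MAPPER mapping step becomes redundant. A minimal, self-contained sketch of the pattern, with hypothetical names:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    class GzipJsonWriteSketch {
        // writes one gzip-compressed JSON object per line under outputPath
        static void write(Dataset<Row> ds, String outputPath) {
            ds.write()
                    .mode(SaveMode.Overwrite)        // replace any previous output
                    .option("compression", "gzip")   // JSON data source codec option
                    .json(outputPath);
        }
    }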

View File

@@ -33,7 +33,7 @@ public class SparkUpdateEntity extends AbstractSparkAction {

     private static final Logger log = LoggerFactory.getLogger(SparkUpdateEntity.class);

-    final String IDJSONPATH = "$.id";
+    private static final String IDJSONPATH = "$.id";

     public SparkUpdateEntity(ArgumentApplicationParser parser, SparkSession spark) {
         super(parser, spark);
@@ -65,27 +65,25 @@ public class SparkUpdateEntity extends AbstractSparkAction {
         log.info("workingPath: '{}'", workingPath);
         log.info("dedupGraphPath: '{}'", dedupGraphPath);

-        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

         // for each entity
         ModelSupport.entityTypes.forEach(
-                (entity, clazz) -> {
-                    final String outputPath = dedupGraphPath + "/" + entity;
+                (type, clazz) -> {
+                    final String outputPath = dedupGraphPath + "/" + type;
                     removeOutputDir(spark, outputPath);
                     JavaRDD<String> sourceEntity =
                             sc.textFile(
-                                    DedupUtility.createEntityPath(
-                                            graphBasePath, entity.toString()));
+                                    DedupUtility.createEntityPath(graphBasePath, type.toString()));

-                    if (mergeRelExists(workingPath, entity.toString())) {
+                    if (mergeRelExists(workingPath, type.toString())) {

                         final String mergeRelPath =
-                                DedupUtility.createMergeRelPath(
-                                        workingPath, "*", entity.toString());
+                                DedupUtility.createMergeRelPath(workingPath, "*", type.toString());
                         final String dedupRecordPath =
                                 DedupUtility.createDedupRecordPath(
-                                        workingPath, "*", entity.toString());
+                                        workingPath, "*", type.toString());

                         final Dataset<Relation> rel =
                                 spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
@@ -107,7 +105,6 @@ public class SparkUpdateEntity extends AbstractSparkAction {
                                                         MapDocumentUtil.getJPathString(
                                                                 IDJSONPATH, s),
                                                         s));
-
                         JavaRDD<String> map =
                                 entitiesWithId
                                         .leftOuterJoin(mergedIds)

View File

@@ -72,7 +72,7 @@ public class SparkDedupTest implements Serializable {
         spark =
                 SparkSession.builder()
-                        .appName(SparkCreateSimRels.class.getSimpleName())
+                        .appName(SparkDedupTest.class.getSimpleName())
                         .master("local[*]")
                         .config(new SparkConf())
                         .getOrCreate();
@@ -272,7 +272,17 @@ public class SparkDedupTest implements Serializable {
                         .distinct()
                         .count();

-        assertEquals(831, publications);
+        long mergedSw =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
+                        .as(Encoders.bean(Relation.class))
+                        .where("relClass=='merges'")
+                        .javaRDD()
+                        .map(Relation::getTarget)
+                        .distinct()
+                        .count();
+
+        assertEquals(897, publications);
         assertEquals(835, organizations);
         assertEquals(100, projects);
         assertEquals(100, datasource);
@@ -288,8 +298,14 @@ public class SparkDedupTest implements Serializable {
                         .filter(this::isDeletedByInference)
                         .count();

+        long deletedSw =
+                jsc.textFile(testDedupGraphBasePath + "/software")
+                        .filter(this::isDeletedByInference)
+                        .count();
+
         assertEquals(mergedOrgs, deletedOrgs);
         assertEquals(mergedPubs, deletedPubs);
+        assertEquals(mergedSw, deletedSw);
     }

     @Test

View File

@@ -10,6 +10,20 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.dom4j.Document;
+import org.dom4j.DocumentFactory;
+import org.dom4j.DocumentHelper;
+import org.dom4j.Node;
+
+import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
@@ -27,17 +41,6 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.Software;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.commons.lang3.StringUtils;
-import org.dom4j.Document;
-import org.dom4j.DocumentFactory;
-import org.dom4j.DocumentHelper;
-import org.dom4j.Node;

 public abstract class AbstractMdRecordToOafMapper {
@@ -46,25 +49,6 @@ public abstract class AbstractMdRecordToOafMapper {
     protected static final Qualifier MAIN_TITLE_QUALIFIER =
             qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");

-    protected static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
-            qualifier(
-                    "publication",
-                    "publication",
-                    "dnet:result_typologies",
-                    "dnet:result_typologies");
-    protected static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
-            qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
-    protected static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
-            qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
-    protected static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
-            qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
-    protected static final Qualifier REPOSITORY_QUALIFIER =
-            qualifier(
-                    "sysimport:crosswalk:repository",
-                    "sysimport:crosswalk:repository",
-                    "dnet:provenanceActions",
-                    "dnet:provenanceActions");
-
     protected AbstractMdRecordToOafMapper(final Map<String, String> code2name) {
         this.code2name = code2name;
     }
@@ -82,22 +66,15 @@ public abstract class AbstractMdRecordToOafMapper {
             DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

             final Document doc =
-                    DocumentHelper.parseText(
-                            xml.replaceAll(
-                                    "http://datacite.org/schema/kernel-4",
-                                    "http://datacite.org/schema/kernel-3"));
+                    DocumentHelper.parseText(xml.replaceAll("http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));

             final String type = doc.valueOf("//dr:CobjCategory/@type");
             final KeyValue collectedFrom =
-                    keyValue(
-                            createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
-                            doc.valueOf("//oaf:collectedFrom/@name"));
+                    keyValue(createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true), doc.valueOf("//oaf:collectedFrom/@name"));
             final KeyValue hostedBy =
                     StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
                             ? collectedFrom
-                            : keyValue(
-                                    createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
-                                    doc.valueOf("//oaf:hostedBy/@name"));
+                            : keyValue(createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true), doc.valueOf("//oaf:hostedBy/@name"));

             final DataInfo info = prepareDataInfo(doc);
             final long lastUpdateTimestamp = new Date().getTime();
@@ -123,14 +100,14 @@ public abstract class AbstractMdRecordToOafMapper {
             case "publication":
                 final Publication p = new Publication();
                 populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER);
+                p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
                 p.setJournal(prepareJournal(doc, info));
                 oafs.add(p);
                 break;
             case "dataset":
                 final Dataset d = new Dataset();
                 populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                d.setResulttype(DATASET_RESULTTYPE_QUALIFIER);
+                d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
                 d.setStoragedate(prepareDatasetStorageDate(doc, info));
                 d.setDevice(prepareDatasetDevice(doc, info));
                 d.setSize(prepareDatasetSize(doc, info));
@@ -143,7 +120,7 @@ public abstract class AbstractMdRecordToOafMapper {
             case "software":
                 final Software s = new Software();
                 populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER);
+                s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
                 s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
                 s.setLicense(prepareSoftwareLicenses(doc, info));
                 s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
@@ -154,7 +131,7 @@ public abstract class AbstractMdRecordToOafMapper {
             default:
                 final OtherResearchProduct o = new OtherResearchProduct();
                 populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-                o.setResulttype(OTHER_RESULTTYPE_QUALIFIER);
+                o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
                 o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
                 o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
                 o.setTool(prepareOtherResearchProductTools(doc, info));
@@ -227,14 +204,7 @@ public abstract class AbstractMdRecordToOafMapper {
         r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
         r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier")));
         r.setCollectedfrom(Arrays.asList(collectedFrom));
-        r.setPid(
-                prepareListStructProps(
-                        doc,
-                        "//oaf:identifier",
-                        "@identifierType",
-                        "dnet:pid_types",
-                        "dnet:pid_types",
-                        info));
+        r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info));
         r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
         r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
         r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
@@ -263,7 +233,10 @@ public abstract class AbstractMdRecordToOafMapper {
     protected abstract Qualifier prepareResourceType(Document doc, DataInfo info);

     protected abstract List<Instance> prepareInstances(
-            Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby);
+            Document doc,
+            DataInfo info,
+            KeyValue collectedfrom,
+            KeyValue hostedby);

     protected abstract List<Field<String>> prepareSources(Document doc, DataInfo info);
@@ -288,28 +261,34 @@ public abstract class AbstractMdRecordToOafMapper {
     protected abstract List<Author> prepareAuthors(Document doc, DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductTools(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductContactGroups(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductContactPersons(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info);

     protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);

     protected abstract List<StructuredProperty> prepareSoftwareLicenses(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);

     protected abstract Field<String> prepareDatasetMetadataVersionNumber(
-            Document doc, DataInfo info);
+            Document doc,
+            DataInfo info);

     protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
@@ -333,27 +312,16 @@ public abstract class AbstractMdRecordToOafMapper {
             final String sp = n.valueOf("@sp");
             final String vol = n.valueOf("@vol");
             final String edition = n.valueOf("@edition");
-            if (StringUtils.isNotBlank(name)) {
-                return journal(
-                        name,
-                        issnPrinted,
-                        issnOnline,
-                        issnLinking,
-                        ep,
-                        iss,
-                        sp,
-                        vol,
-                        edition,
-                        null,
-                        null,
-                        info);
-            }
+            if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, ep, iss, sp, vol, edition, null, null, info); }
         }
         return null;
     }

     protected Qualifier prepareQualifier(
-            final Node node, final String xpath, final String schemeId, final String schemeName) {
+            final Node node,
+            final String xpath,
+            final String schemeId,
+            final String schemeName) {
         final String classId = node.valueOf(xpath);
         final String className = code2name.get(classId);
         return qualifier(classId, className, schemeId, schemeName);
@@ -371,15 +339,16 @@ public abstract class AbstractMdRecordToOafMapper {
             final Node n = (Node) o;
             final String classId = n.valueOf(xpathClassId);
             final String className = code2name.get(classId);
-            res.add(
-                    structuredProperty(
-                            n.getText(), classId, className, schemeId, schemeName, info));
+            res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info));
         }
         return res;
     }

     protected List<StructuredProperty> prepareListStructProps(
-            final Node node, final String xpath, final Qualifier qualifier, final DataInfo info) {
+            final Node node,
+            final String xpath,
+            final Qualifier qualifier,
+            final DataInfo info) {
         final List<StructuredProperty> res = new ArrayList<>();
         for (final Object o : node.selectNodes(xpath)) {
             final Node n = (Node) o;
@@ -389,52 +358,37 @@ public abstract class AbstractMdRecordToOafMapper {
     }

     protected List<StructuredProperty> prepareListStructProps(
-            final Node node, final String xpath, final DataInfo info) {
+            final Node node,
+            final String xpath,
+            final DataInfo info) {
         final List<StructuredProperty> res = new ArrayList<>();
         for (final Object o : node.selectNodes(xpath)) {
             final Node n = (Node) o;
-            res.add(
-                    structuredProperty(
-                            n.getText(),
-                            n.valueOf("@classid"),
-                            n.valueOf("@classname"),
-                            n.valueOf("@schemeid"),
-                            n.valueOf("@schemename"),
-                            info));
+            res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n.valueOf("@schemename"), info));
         }
         return res;
     }

     protected OAIProvenance prepareOAIprovenance(final Document doc) {
         final Node n =
-                doc.selectSingleNode(
-                        "//*[local-name()='provenance']/*[local-name()='originDescription']");
+                doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']");

-        if (n == null) {
-            return null;
-        }
+        if (n == null) { return null; }

         final String identifier = n.valueOf("./*[local-name()='identifier']");
-        final String baseURL = n.valueOf("./*[local-name()='baseURL']");
-        ;
-        final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");
-        ;
+        final String baseURL = n.valueOf("./*[local-name()='baseURL']");;
+        final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");;
         final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true");
-        final String datestamp = n.valueOf("./*[local-name()='datestamp']");
-        ;
-        final String harvestDate = n.valueOf("@harvestDate");
-        ;
+        final String datestamp = n.valueOf("./*[local-name()='datestamp']");;
+        final String harvestDate = n.valueOf("@harvestDate");;

-        return oaiIProvenance(
-                identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
+        return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
     }

     protected DataInfo prepareDataInfo(final Document doc) {
         final Node n = doc.selectSingleNode("//oaf:datainfo");
-        if (n == null) {
-            return dataInfo(false, null, false, false, REPOSITORY_QUALIFIER, "0.9");
-        }
+        if (n == null) { return dataInfo(false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9"); }

         final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
         final String paClassName = n.valueOf("./oaf:provenanceaction/@classname");
@@ -447,13 +401,7 @@ public abstract class AbstractMdRecordToOafMapper {
         final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
         final String trust = n.valueOf("./oaf:trust");

-        return dataInfo(
-                deletedbyinference,
-                inferenceprovenance,
-                inferred,
-                false,
-                qualifier(paClassId, paClassName, paSchemeId, paSchemeName),
-                trust);
+        return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust);
     }

     protected Field<String> prepareField(final Node node, final String xpath, final DataInfo info) {
@@ -461,7 +409,9 @@ public abstract class AbstractMdRecordToOafMapper {
     }

     protected List<Field<String>> prepareListFields(
-            final Node node, final String xpath, final DataInfo info) {
+            final Node node,
+            final String xpath,
+            final DataInfo info) {
         return listFields(info, prepareListString(node, xpath));
     }

View File

@@ -10,9 +10,27 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

+import java.io.Closeable;
+import java.io.IOException;
+import java.sql.Array;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.function.Function;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
+import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
@@ -30,32 +48,10 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.Software;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
-import java.io.Closeable;
-import java.io.IOException;
-import java.sql.Array;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;

 public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         implements Closeable {

-    private static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
-            qualifier(
-                    "sysimport:crosswalk:entityregistry",
-                    "sysimport:crosswalk:entityregistry",
-                    "dnet:provenance_actions",
-                    "dnet:provenance_actions");
-
     private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class);

     private final DbClient dbClient;
@@ -65,9 +61,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
     public static void main(final String[] args) throws Exception {
         final ArgumentApplicationParser parser =
                 new ArgumentApplicationParser(
-                        IOUtils.toString(
-                                MigrateDbEntitiesApplication.class.getResourceAsStream(
-                                        "/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json")));
+                        IOUtils.toString(MigrateDbEntitiesApplication.class
                                .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json")));

         parser.parseArgument(args);
@@ -96,8 +91,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
                 smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);

                 log.info("Processing relations ds <-> orgs ...");
-                smdbe.execute(
-                        "queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
+                smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);

                 log.info("Processing projects <-> orgs ...");
                 smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
@@ -123,8 +117,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
     public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
             throws Exception {
         final String sql =
-                IOUtils.toString(
-                        getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
+                IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));

         final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
@@ -141,18 +134,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
             ds.setOriginalId(Arrays.asList(rs.getString("datasourceid")));
-            ds.setCollectedfrom(
-                    listKeyValues(
-                            createOpenaireId(10, rs.getString("collectedfromid"), true),
-                            rs.getString("collectedfromname")));
+            ds.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
             ds.setPid(new ArrayList<>());
             ds.setDateofcollection(asString(rs.getDate("dateofcollection")));
             ds.setDateoftransformation(null); // Value not returned by the SQL query
             ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
             ds.setOaiprovenance(null); // Values not present in the DB
             ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
-            ds.setOpenairecompatibility(
-                    prepareQualifierSplitting(rs.getString("openairecompatibility")));
+            ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility")));
             ds.setOfficialname(field(rs.getString("officialname"), info));
             ds.setEnglishname(field(rs.getString("englishname"), info));
             ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
@@ -185,11 +174,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             ds.setPidsystems(field(rs.getString("pidsystems"), info));
             ds.setCertificates(field(rs.getString("certificates"), info));
             ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
-            ds.setJournal(
-                    prepareJournal(
-                            rs.getString("officialname"),
-                            rs.getString("journal"),
-                            info)); // Journal
+            ds.setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal
             ds.setDataInfo(info);
             ds.setLastupdatetimestamp(lastUpdateTimestamp);
@@ -208,10 +193,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             p.setId(createOpenaireId(40, rs.getString("projectid"), true));
             p.setOriginalId(Arrays.asList(rs.getString("projectid")));
-            p.setCollectedfrom(
-                    listKeyValues(
-                            createOpenaireId(10, rs.getString("collectedfromid"), true),
-                            rs.getString("collectedfromname")));
+            p.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
             p.setPid(new ArrayList<>());
             p.setDateofcollection(asString(rs.getDate("dateofcollection")));
             p.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@@ -227,8 +209,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             p.setKeywords(field(rs.getString("keywords"), info));
             p.setDuration(field(Integer.toString(rs.getInt("duration")), info));
             p.setEcsc39(field(Boolean.toString(rs.getBoolean("ecsc39")), info));
-            p.setOamandatepublications(
-                    field(Boolean.toString(rs.getBoolean("oamandatepublications")), info));
+            p.setOamandatepublications(field(Boolean.toString(rs.getBoolean("oamandatepublications")), info));
             p.setEcarticle29_3(field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info));
             p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info));
             p.setFundingtree(prepareListFields(rs.getArray("fundingtree"), info));
@@ -264,10 +245,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             o.setId(createOpenaireId(20, rs.getString("organizationid"), true));
             o.setOriginalId(Arrays.asList(rs.getString("organizationid")));
-            o.setCollectedfrom(
-                    listKeyValues(
-                            createOpenaireId(10, rs.getString("collectedfromid"), true),
-                            rs.getString("collectedfromname")));
+            o.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
             o.setPid(new ArrayList<>());
             o.setDateofcollection(asString(rs.getDate("dateofcollection")));
             o.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@@ -281,17 +259,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             o.setEclegalbody(field(Boolean.toString(rs.getBoolean("eclegalbody")), info));
             o.setEclegalperson(field(Boolean.toString(rs.getBoolean("eclegalperson")), info));
             o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
-            o.setEcresearchorganization(
-                    field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
-            o.setEchighereducation(
-                    field(Boolean.toString(rs.getBoolean("echighereducation")), info));
-            o.setEcinternationalorganizationeurinterests(
-                    field(
-                            Boolean.toString(
-                                    rs.getBoolean("ecinternationalorganizationeurinterests")),
-                            info));
-            o.setEcinternationalorganization(
-                    field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
+            o.setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
+            o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info));
+            o.setEcinternationalorganizationeurinterests(field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
+            o.setEcinternationalorganization(field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
             o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
             o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info));
             o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info));
@@ -311,9 +282,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             final String orgId = createOpenaireId(20, rs.getString("organization"), true);
             final String dsId = createOpenaireId(10, rs.getString("datasource"), true);
             final List<KeyValue> collectedFrom =
-                    listKeyValues(
-                            createOpenaireId(10, rs.getString("collectedfromid"), true),
-                            rs.getString("collectedfromname"));
+                    listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

             final Relation r1 = new Relation();
             r1.setRelType("datasourceOrganization");
@@ -347,9 +316,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             final String orgId = createOpenaireId(20, rs.getString("resporganization"), true);
             final String projectId = createOpenaireId(40, rs.getString("project"), true);
             final List<KeyValue> collectedFrom =
-                    listKeyValues(
-                            createOpenaireId(10, rs.getString("collectedfromid"), true),
-                            rs.getString("collectedfromname"));
+                    listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

             final Relation r1 = new Relation();
             r1.setRelType("projectOrganization");
@@ -380,17 +347,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
     public List<Oaf> processClaims(final ResultSet rs) {

         final DataInfo info =
-                dataInfo(
-                        false,
-                        null,
-                        false,
-                        false,
-                        qualifier(
-                                "user:claim",
-                                "user:claim",
-                                "dnet:provenanceActions",
-                                "dnet:provenanceActions"),
-                        "0.9");
+                dataInfo(false, null, false, false, qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9");

         final List<KeyValue> collectedFrom =
                 listKeyValues(createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
@@ -402,12 +359,16 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             if (rs.getString("target_type").equals("dataset")) {
                 r = new Dataset();
+                r.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
             } else if (rs.getString("target_type").equals("software")) {
                 r = new Software();
+                r.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
             } else if (rs.getString("target_type").equals("other")) {
                 r = new OtherResearchProduct();
+                r.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
             } else {
                 r = new Publication();
+                r.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
             }
             r.setId(createOpenaireId(50, rs.getString("target_id"), false));
             r.setLastupdatetimestamp(lastUpdateTimestamp);
@@ -418,11 +379,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             return Arrays.asList(r);
         } else {
             final String sourceId =
-                    createOpenaireId(
-                            rs.getString("source_type"), rs.getString("source_id"), false);
+                    createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
             final String targetId =
-                    createOpenaireId(
-                            rs.getString("target_type"), rs.getString("target_id"), false);
+                    createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);

             final Relation r1 = new Relation();
             final Relation r2 = new Relation();
@@ -479,19 +438,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         final String inferenceprovenance = rs.getString("inferenceprovenance");
         final Boolean inferred = rs.getBoolean("inferred");
         final String trust = rs.getString("trust");
-        return dataInfo(
-                deletedbyinference,
-                inferenceprovenance,
-                inferred,
-                false,
-                ENTITYREGISTRY_PROVENANCE_ACTION,
-                trust);
+        return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust);
     }

     private Qualifier prepareQualifierSplitting(final String s) {
-        if (StringUtils.isBlank(s)) {
-            return null;
-        }
+        if (StringUtils.isBlank(s)) { return null; }
         final String[] arr = s.split("@@@");
         return arr.length == 4 ? qualifier(arr[0], arr[1], arr[2], arr[3]) : null;
     }
@@ -507,22 +458,19 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
     }

     private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) {
-        if (StringUtils.isBlank(s)) {
-            return null;
-        }
+        if (StringUtils.isBlank(s)) { return null; }
         final String[] parts = s.split("###");
         if (parts.length == 2) {
             final String value = parts[0];
             final String[] arr = parts[1].split("@@@");
-            if (arr.length == 4) {
-                return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo);
-            }
+            if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); }
         }
         return null;
     }

     private List<StructuredProperty> prepareListOfStructProps(
-            final Array array, final DataInfo dataInfo) throws SQLException {
+            final Array array,
+            final DataInfo dataInfo) throws SQLException {
         final List<StructuredProperty> res = new ArrayList<>();
         if (array != null) {
             for (final String s : (String[]) array.getArray()) {
@@ -541,14 +489,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             final String[] arr = sj.split("@@@");
             if (arr.length == 3) {
                 final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null;
-                final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;
-                ;
-                final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;
-                ;
+                final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;;
+                final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;;
                 if (issn != null || eissn != null || lissn != null) {
-                    return journal(
-                            name, issn, eissn, eissn, null, null, null, null, null, null, null,
-                            info);
+                    return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
                 }
             }
         }

View File

@@ -0,0 +1,22 @@
+package eu.dnetlib.dhp.oa.graph.raw.common;
+
+import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
+
+import eu.dnetlib.dhp.schema.oaf.Qualifier;
+
+public class MigrationConstants {
+
+    public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
+            qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
+            qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
+            qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
+            qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
+            qualifier("sysimport:crosswalk:repository", "sysimport:crosswalk:repository", "dnet:provenanceActions", "dnet:provenanceActions");
+    public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
+            qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenanceActions", "dnet:provenanceActions");
+}
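
Side note (not part of the commit): the new MigrationConstants class centralizes the Qualifier instances previously duplicated as private constants in each mapper; callers reference them directly, e.g. (hypothetical snippet):

    import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
    import eu.dnetlib.dhp.schema.oaf.Publication;

    // any mapper producing a result entity reuses the shared qualifier
    final Publication p = new Publication();
    p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);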

View File

@@ -21,6 +21,28 @@
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
+       <property>
+           <name>oozieActionShareLibForSpark2</name>
+           <description>oozie action sharelib for spark 2.*</description>
+       </property>
+       <property>
+           <name>spark2ExtraListeners</name>
+           <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+           <description>spark 2.* extra listeners classname</description>
+       </property>
+       <property>
+           <name>spark2SqlQueryExecutionListeners</name>
+           <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+           <description>spark 2.* sql query execution listeners classname</description>
+       </property>
+       <property>
+           <name>spark2YarnHistoryServerAddress</name>
+           <description>spark 2.* yarn history server address</description>
+       </property>
+       <property>
+           <name>spark2EventLogDir</name>
+           <description>spark 2.* event log dir location</description>
+       </property>
    </parameters>

    <global>
@@ -35,6 +57,10 @@
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
+           <property>
+               <name>oozie.action.sharelib.for.spark</name>
+               <value>${oozieActionShareLibForSpark2}</value>
+           </property>
        </configuration>
    </global>
@@ -52,14 +78,15 @@
            <class>eu.dnetlib.dhp.oa.graph.GraphHiveImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
-               --executor-memory ${sparkExecutorMemory}
-               --executor-cores ${sparkExecutorCores}
+               --executor-memory=${sparkExecutorMemory}
+               --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
-               --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
-               --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
-               --conf spark.sql.warehouse.dir="/user/hive/warehouse"
+               --conf spark.extraListeners=${spark2ExtraListeners}
+               --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+               --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+               --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+               --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
-           <arg>-mt</arg> <arg>yarn-cluster</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--hive_db_name</arg><arg>${hive_db_name}</arg>
            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>

View File

@@ -11,11 +11,13 @@ import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
 import eu.dnetlib.dhp.oa.provision.model.SortableRelation;
 import eu.dnetlib.dhp.schema.common.EntityType;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.oaf.*;
+import java.util.List;
+import java.util.Objects;
 import java.util.Optional;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
@@ -104,16 +106,12 @@ public class CreateRelatedEntitiesJob_phase1 {
             SparkSession spark,
             String inputRelationsPath,
             String inputEntityPath,
-            Class<E> entityClazz,
+            Class<E> clazz,
             String outputPath) {
         Dataset<Tuple2<String, SortableRelation>> relsByTarget =
                 readPathRelation(spark, inputRelationsPath)
-                        .filter(
-                                (FilterFunction<SortableRelation>)
-                                        value ->
-                                                value.getDataInfo().getDeletedbyinference()
-                                                        == false)
+                        .filter("dataInfo.deletedbyinference == false")
                         .map(
                                 (MapFunction<SortableRelation, Tuple2<String, SortableRelation>>)
                                         r -> new Tuple2<>(r.getTarget(), r),
@@ -122,10 +120,11 @@ public class CreateRelatedEntitiesJob_phase1 {
                         .cache();

         Dataset<Tuple2<String, RelatedEntity>> entities =
-                readPathEntity(spark, inputEntityPath, entityClazz)
+                readPathEntity(spark, inputEntityPath, clazz)
+                        .filter("dataInfo.invisible == false")
                         .map(
                                 (MapFunction<E, RelatedEntity>)
-                                        value -> asRelatedEntity(value, entityClazz),
+                                        value -> asRelatedEntity(value, clazz),
                                 Encoders.bean(RelatedEntity.class))
                         .map(
                                 (MapFunction<RelatedEntity, Tuple2<String, RelatedEntity>>)
@@ -146,7 +145,7 @@ public class CreateRelatedEntitiesJob_phase1 {
                                 Encoders.bean(EntityRelEntity.class))
                 .write()
                 .mode(SaveMode.Overwrite)
-                .parquet(outputPath + "/" + EntityType.fromClass(entityClazz));
+                .parquet(outputPath + "/" + EntityType.fromClass(clazz));
     }

     private static <E extends OafEntity> Dataset<E> readPathEntity(
@@ -161,6 +160,81 @@ public class CreateRelatedEntitiesJob_phase1 {
                                 Encoders.bean(entityClazz));
     }

+    public static <E extends OafEntity> RelatedEntity asRelatedEntity(E entity, Class<E> clazz) {
+
+        final RelatedEntity re = new RelatedEntity();
+        re.setId(entity.getId());
+        re.setType(EntityType.fromClass(clazz).name());
+
+        re.setPid(entity.getPid());
+        re.setCollectedfrom(entity.getCollectedfrom());
+
+        switch (EntityType.fromClass(clazz)) {
+            case publication:
+            case dataset:
+            case otherresearchproduct:
+            case software:
+                Result result = (Result) entity;
+
+                if (result.getTitle() != null && !result.getTitle().isEmpty()) {
+                    re.setTitle(result.getTitle().stream().findFirst().get());
+                }
+
+                re.setDateofacceptance(getValue(result.getDateofacceptance()));
+                re.setPublisher(getValue(result.getPublisher()));
+                re.setResulttype(result.getResulttype());
+                re.setInstances(result.getInstance());
+
+                // TODO still to be mapped
+                // re.setCodeRepositoryUrl(j.read("$.coderepositoryurl"));
+
+                break;
+            case datasource:
+                Datasource d = (Datasource) entity;
+
+                re.setOfficialname(getValue(d.getOfficialname()));
+                re.setWebsiteurl(getValue(d.getWebsiteurl()));
+                re.setDatasourcetype(d.getDatasourcetype());
+                re.setOpenairecompatibility(d.getOpenairecompatibility());
+
+                break;
+            case organization:
+                Organization o = (Organization) entity;
+
+                re.setLegalname(getValue(o.getLegalname()));
+                re.setLegalshortname(getValue(o.getLegalshortname()));
+                re.setCountry(o.getCountry());
+                re.setWebsiteurl(getValue(o.getWebsiteurl()));
+
+                break;
+            case project:
+                Project p = (Project) entity;
+
+                re.setProjectTitle(getValue(p.getTitle()));
+                re.setCode(getValue(p.getCode()));
+                re.setAcronym(getValue(p.getAcronym()));
+                re.setContracttype(p.getContracttype());
+
+                List<Field<String>> f = p.getFundingtree();
+                if (!f.isEmpty()) {
+                    re.setFundingtree(
+                            f.stream().map(s -> s.getValue()).collect(Collectors.toList()));
+                }
+
+                break;
+        }
+        return re;
+    }
+
+    private static String getValue(Field<String> field) {
+        return getFieldValueWithDefault(field, "");
+    }
+
+    private static <T> T getFieldValueWithDefault(Field<T> f, T defaultValue) {
+        return Optional.ofNullable(f)
+                .filter(Objects::nonNull)
+                .map(x -> x.getValue())
+                .orElse(defaultValue);
+    }
+
     /**
      * Reads a Dataset of eu.dnetlib.dhp.oa.provision.model.SortableRelation objects from a newline
      * delimited json text file,

View File

@@ -76,9 +76,6 @@ public class PrepareRelationsJob {
         String outputPath = parser.get("outputPath");
         log.info("outputPath: {}", outputPath);

-        int numPartitions = Integer.parseInt(parser.get("relPartitions"));
-        log.info("relPartitions: {}", numPartitions);
-
         SparkConf conf = new SparkConf();

         runWithSparkSession(
@@ -86,27 +83,14 @@ public class PrepareRelationsJob {
                 isSparkSessionManaged,
                 spark -> {
                     removeOutputDir(spark, outputPath);
-                    prepareRelationsFromPaths(spark, inputRelationsPath, outputPath, numPartitions);
+                    prepareRelationsFromPaths(spark, inputRelationsPath, outputPath);
                 });
     }

     private static void prepareRelationsFromPaths(
-            SparkSession spark, String inputRelationsPath, String outputPath, int numPartitions) {
+            SparkSession spark, String inputRelationsPath, String outputPath) {
         readPathRelation(spark, inputRelationsPath)
-                .filter(
-                        (FilterFunction<SortableRelation>)
-                                r -> {
-                                    try {
-                                        return r != null
-                                                && r.getDataInfo() != null
-                                                && !r.getDataInfo().getDeletedbyinference();
-                                    } catch (NullPointerException e) {
-                                        log.info(
-                                                "invalid NPE '{}'",
-                                                OBJECT_MAPPER.writeValueAsString(r));
-                                        throw e;
-                                    }
-                                })
+                .filter("dataInfo.deletedbyinference == false")
                 .groupByKey(
                         (MapFunction<SortableRelation, String>) value -> value.getSource(),
                         Encoders.STRING())
@@ -114,7 +98,6 @@ public class PrepareRelationsJob {
                         (FlatMapGroupsFunction<String, SortableRelation, SortableRelation>)
                                 (key, values) -> Iterators.limit(values, MAX_RELS),
                         Encoders.bean(SortableRelation.class))
-                .repartition(numPartitions)
                 .write()
                 .mode(SaveMode.Overwrite)
                 .parquet(outputPath);
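
Side note (not part of the commit): replacing the typed FilterFunction with a SQL expression string also removes the need for the explicit null guard, because a comparison against a null struct evaluates to SQL NULL and the row is simply dropped. The two equivalent forms, as an illustrative sketch over a Dataset<SortableRelation> named rels:

    // typed filter: needs an explicit null check to avoid the NPE handled above
    rels.filter((FilterFunction<SortableRelation>) r ->
            r.getDataInfo() != null && !r.getDataInfo().getDeletedbyinference());

    // SQL-expression filter: rows with a null dataInfo are filtered out implicitly
    rels.filter("dataInfo.deletedbyinference == false");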

View File

@@ -3,14 +3,8 @@ package eu.dnetlib.dhp.oa.provision.utils;
 import static org.apache.commons.lang3.StringUtils.substringAfter;

 import com.google.common.collect.Sets;
-import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
-import eu.dnetlib.dhp.schema.common.EntityType;
 import eu.dnetlib.dhp.schema.oaf.*;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
-import java.util.stream.Collectors;

 public class GraphMappingUtils {
@@ -18,81 +12,6 @@ public class GraphMappingUtils {

     public static Set<String> authorPidTypes = Sets.newHashSet("orcid", "magidentifier");

-    public static <E extends OafEntity> RelatedEntity asRelatedEntity(E entity, Class<E> clazz) {
-
-        final RelatedEntity re = new RelatedEntity();
-        re.setId(entity.getId());
-        re.setType(EntityType.fromClass(clazz).name());
-
-        re.setPid(entity.getPid());
-        re.setCollectedfrom(entity.getCollectedfrom());
-
-        switch (EntityType.fromClass(clazz)) {
-            case publication:
-            case dataset:
-            case otherresearchproduct:
-            case software:
-                Result result = (Result) entity;
-
-                if (result.getTitle() == null && !result.getTitle().isEmpty()) {
-                    re.setTitle(result.getTitle().stream().findFirst().get());
-                }
-
-                re.setDateofacceptance(getValue(result.getDateofacceptance()));
-                re.setPublisher(getValue(result.getPublisher()));
-                re.setResulttype(result.getResulttype());
-                re.setInstances(result.getInstance());
-
-                // TODO still to be mapped
-                // re.setCodeRepositoryUrl(j.read("$.coderepositoryurl"));
-
-                break;
-            case datasource:
-                Datasource d = (Datasource) entity;
-
-                re.setOfficialname(getValue(d.getOfficialname()));
-                re.setWebsiteurl(getValue(d.getWebsiteurl()));
-                re.setDatasourcetype(d.getDatasourcetype());
-                re.setOpenairecompatibility(d.getOpenairecompatibility());
-
-                break;
-            case organization:
-                Organization o = (Organization) entity;
-
-                re.setLegalname(getValue(o.getLegalname()));
-                re.setLegalshortname(getValue(o.getLegalshortname()));
-                re.setCountry(o.getCountry());
-                re.setWebsiteurl(getValue(o.getWebsiteurl()));
-
-                break;
-            case project:
-                Project p = (Project) entity;
-
-                re.setProjectTitle(getValue(p.getTitle()));
-                re.setCode(getValue(p.getCode()));
-                re.setAcronym(getValue(p.getAcronym()));
-                re.setContracttype(p.getContracttype());
-
-                List<Field<String>> f = p.getFundingtree();
-                if (!f.isEmpty()) {
-                    re.setFundingtree(
-                            f.stream().map(s -> s.getValue()).collect(Collectors.toList()));
-                }
-
-                break;
-        }
-        return re;
-    }
-
-    private static String getValue(Field<String> field) {
-        return getFieldValueWithDefault(field, "");
-    }
-
-    private static <T> T getFieldValueWithDefault(Field<T> f, T defaultValue) {
-        return Optional.ofNullable(f)
-                .filter(Objects::nonNull)
-                .map(x -> x.getValue())
-                .orElse(defaultValue);
-    }
-
     public static String removePrefix(final String s) {
         if (s.contains("|")) return substringAfter(s, "|");
         return s;

View File

@@ -98,6 +98,7 @@
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+               --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${inputGraphRootPath}/relation</arg>
            <arg>--outputPath</arg><arg>${workingDir}/relation</arg>

View File

@@ -261,7 +261,8 @@
                            sparkDriverMemory,sparkExecutorMemory,sparkExecutorCores,
                            oozie.wf.application.path,projectVersion,oozie.use.system.libpath,
                            oozieActionShareLibForSpark1,spark1YarnHistoryServerAddress,spark1EventLogDir,
-                           oozieActionShareLibForSpark2,spark2YarnHistoryServerAddress,spark2EventLogDir
+                           oozieActionShareLibForSpark2,spark2YarnHistoryServerAddress,spark2EventLogDir,
+                           sparkSqlWarehouseDir
                        </include>
                        <includeSystemProperties>true</includeSystemProperties>
                        <includePropertyKeysFromFiles>