forked from D-Net/dnet-hadoop
Merge remote-tracking branch 'origin/master' into doiboost
commit bb6c9785b4
@@ -11,8 +11,6 @@ import eu.dnetlib.pace.config.DedupConfig;
 import java.io.IOException;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
 import org.dom4j.DocumentException;
@@ -72,12 +70,9 @@ public class SparkCreateDedupRecord extends AbstractSparkAction {
             Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));

             DedupRecordFactory.createDedupRecord(spark, mergeRelPath, entityPath, clazz)
-                    .map(
-                            (MapFunction<OafEntity, String>)
-                                    value -> OBJECT_MAPPER.writeValueAsString(value),
-                            Encoders.STRING())
                     .write()
                     .mode(SaveMode.Overwrite)
+                    .option("compression", "gzip")
                     .json(outputPath);
         }
 }
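
Note: the hunk above drops the intermediate map-to-JSON-string step and writes the Dataset directly as gzip-compressed JSON. A minimal, self-contained sketch of the same write pattern, assuming a local Spark session; the class name, sample data and output path are illustrative only, not part of the patch:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class GzipJsonWriteSketch {
    public static void main(String[] args) {
        // local session purely for illustration
        SparkSession spark =
                SparkSession.builder().appName("gzip-json-sketch").master("local[*]").getOrCreate();
        Dataset<Row> ds = spark.range(10).toDF("id"); // placeholder data
        ds.write()
                .mode(SaveMode.Overwrite)
                .option("compression", "gzip") // same option introduced in the hunk above
                .json("/tmp/gzip-json-sketch"); // placeholder output path
        spark.stop();
    }
}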
@@ -33,7 +33,7 @@ public class SparkUpdateEntity extends AbstractSparkAction {

     private static final Logger log = LoggerFactory.getLogger(SparkUpdateEntity.class);

-    final String IDJSONPATH = "$.id";
+    private static final String IDJSONPATH = "$.id";

     public SparkUpdateEntity(ArgumentApplicationParser parser, SparkSession spark) {
         super(parser, spark);
@@ -65,27 +65,25 @@ public class SparkUpdateEntity extends AbstractSparkAction {
         log.info("workingPath: '{}'", workingPath);
         log.info("dedupGraphPath: '{}'", dedupGraphPath);

-        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

         // for each entity
         ModelSupport.entityTypes.forEach(
-                (entity, clazz) -> {
-                    final String outputPath = dedupGraphPath + "/" + entity;
+                (type, clazz) -> {
+                    final String outputPath = dedupGraphPath + "/" + type;
                     removeOutputDir(spark, outputPath);

                     JavaRDD<String> sourceEntity =
                             sc.textFile(
-                                    DedupUtility.createEntityPath(
-                                            graphBasePath, entity.toString()));
+                                    DedupUtility.createEntityPath(graphBasePath, type.toString()));

-                    if (mergeRelExists(workingPath, entity.toString())) {
+                    if (mergeRelExists(workingPath, type.toString())) {

                         final String mergeRelPath =
-                                DedupUtility.createMergeRelPath(
-                                        workingPath, "*", entity.toString());
+                                DedupUtility.createMergeRelPath(workingPath, "*", type.toString());
                         final String dedupRecordPath =
                                 DedupUtility.createDedupRecordPath(
-                                        workingPath, "*", entity.toString());
+                                        workingPath, "*", type.toString());

                         final Dataset<Relation> rel =
                                 spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
@@ -107,7 +105,6 @@ public class SparkUpdateEntity extends AbstractSparkAction {
                                                 MapDocumentUtil.getJPathString(
                                                         IDJSONPATH, s),
                                                 s));
-
                         JavaRDD<String> map =
                                 entitiesWithId
                                         .leftOuterJoin(mergedIds)
@@ -72,7 +72,7 @@ public class SparkDedupTest implements Serializable {

         spark =
                 SparkSession.builder()
-                        .appName(SparkCreateSimRels.class.getSimpleName())
+                        .appName(SparkDedupTest.class.getSimpleName())
                         .master("local[*]")
                         .config(new SparkConf())
                         .getOrCreate();
@@ -272,7 +272,17 @@ public class SparkDedupTest implements Serializable {
                         .distinct()
                         .count();

-        assertEquals(831, publications);
+        long mergedSw =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
+                        .as(Encoders.bean(Relation.class))
+                        .where("relClass=='merges'")
+                        .javaRDD()
+                        .map(Relation::getTarget)
+                        .distinct()
+                        .count();
+
+        assertEquals(897, publications);
         assertEquals(835, organizations);
         assertEquals(100, projects);
         assertEquals(100, datasource);
@@ -288,8 +298,14 @@ public class SparkDedupTest implements Serializable {
                         .filter(this::isDeletedByInference)
                         .count();

+        long deletedSw =
+                jsc.textFile(testDedupGraphBasePath + "/software")
+                        .filter(this::isDeletedByInference)
+                        .count();
+
         assertEquals(mergedOrgs, deletedOrgs);
         assertEquals(mergedPubs, deletedPubs);
+        assertEquals(mergedSw, deletedSw);
     }

     @Test
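
Note: the added software assertions follow the same merged-count vs deleted-count pattern already used for organisations and publications. An illustrative, self-contained version of that consistency check, assuming JUnit 5; the method name, paths and the crude string test standing in for the test class' isDeletedByInference helper are not part of the patch:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class MergedVsDeletedSketch {
    // records flagged deletedbyinference in the output graph should match
    // the distinct targets of 'merges' relations produced by dedup
    static void check(SparkSession spark, String mergeRelPath, String graphEntityPath) {
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
        long merged =
                spark.read()
                        .load(mergeRelPath)
                        .where("relClass == 'merges'")
                        .select("target")
                        .distinct()
                        .count();
        long deleted =
                jsc.textFile(graphEntityPath)
                        .filter(s -> s.contains("\"deletedbyinference\":true")) // crude stand-in
                        .count();
        assertEquals(merged, deleted);
    }
}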
File diff suppressed because one or more lines are too long
@@ -10,6 +10,20 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.dom4j.Document;
+import org.dom4j.DocumentFactory;
+import org.dom4j.DocumentHelper;
+import org.dom4j.Node;
+
+import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
@ -27,452 +41,388 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Result;
|
import eu.dnetlib.dhp.schema.oaf.Result;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Software;
|
import eu.dnetlib.dhp.schema.oaf.Software;
|
||||||
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Date;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.dom4j.Document;
|
|
||||||
import org.dom4j.DocumentFactory;
|
|
||||||
import org.dom4j.DocumentHelper;
|
|
||||||
import org.dom4j.Node;
|
|
||||||
|
|
||||||
public abstract class AbstractMdRecordToOafMapper {
|
public abstract class AbstractMdRecordToOafMapper {
|
||||||
|
|
||||||
protected final Map<String, String> code2name;
|
protected final Map<String, String> code2name;
|
||||||
|
|
||||||
protected static final Qualifier MAIN_TITLE_QUALIFIER =
|
protected static final Qualifier MAIN_TITLE_QUALIFIER =
|
||||||
qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");
|
qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");
|
||||||
|
|
||||||
protected static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
|
protected AbstractMdRecordToOafMapper(final Map<String, String> code2name) {
|
||||||
qualifier(
|
this.code2name = code2name;
|
||||||
"publication",
|
}
|
||||||
"publication",
|
|
||||||
"dnet:result_typologies",
|
public List<Oaf> processMdRecord(final String xml) {
|
||||||
"dnet:result_typologies");
|
try {
|
||||||
protected static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
|
final Map<String, String> nsContext = new HashMap<>();
|
||||||
qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
|
nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
|
||||||
protected static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
|
nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
|
||||||
qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
|
nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
|
||||||
protected static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
|
nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
|
||||||
qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
|
nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
|
||||||
protected static final Qualifier REPOSITORY_QUALIFIER =
|
nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
|
||||||
qualifier(
|
nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
|
||||||
"sysimport:crosswalk:repository",
|
DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);
|
||||||
"sysimport:crosswalk:repository",
|
|
||||||
"dnet:provenanceActions",
|
final Document doc =
|
||||||
"dnet:provenanceActions");
|
DocumentHelper.parseText(xml.replaceAll("http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
|
||||||
|
|
||||||
protected AbstractMdRecordToOafMapper(final Map<String, String> code2name) {
|
final String type = doc.valueOf("//dr:CobjCategory/@type");
|
||||||
this.code2name = code2name;
|
final KeyValue collectedFrom =
|
||||||
}
|
keyValue(createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true), doc.valueOf("//oaf:collectedFrom/@name"));
|
||||||
|
final KeyValue hostedBy =
|
||||||
public List<Oaf> processMdRecord(final String xml) {
|
StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
|
||||||
try {
|
? collectedFrom
|
||||||
final Map<String, String> nsContext = new HashMap<>();
|
: keyValue(createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true), doc.valueOf("//oaf:hostedBy/@name"));
|
||||||
nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
|
|
||||||
nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
|
final DataInfo info = prepareDataInfo(doc);
|
||||||
nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
|
final long lastUpdateTimestamp = new Date().getTime();
|
||||||
nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
|
|
||||||
nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
|
return createOafs(doc, type, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
||||||
nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
|
} catch (final Exception e) {
|
||||||
nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
|
throw new RuntimeException(e);
|
||||||
DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);
|
}
|
||||||
|
}
|
||||||
final Document doc =
|
|
||||||
DocumentHelper.parseText(
|
protected List<Oaf> createOafs(
|
||||||
xml.replaceAll(
|
final Document doc,
|
||||||
"http://datacite.org/schema/kernel-4",
|
final String type,
|
||||||
"http://datacite.org/schema/kernel-3"));
|
final KeyValue collectedFrom,
|
||||||
|
final KeyValue hostedBy,
|
||||||
final String type = doc.valueOf("//dr:CobjCategory/@type");
|
final DataInfo info,
|
||||||
final KeyValue collectedFrom =
|
final long lastUpdateTimestamp) {
|
||||||
keyValue(
|
|
||||||
createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
|
final List<Oaf> oafs = new ArrayList<>();
|
||||||
doc.valueOf("//oaf:collectedFrom/@name"));
|
|
||||||
final KeyValue hostedBy =
|
switch (type.toLowerCase()) {
|
||||||
StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
|
case "":
|
||||||
? collectedFrom
|
case "publication":
|
||||||
: keyValue(
|
final Publication p = new Publication();
|
||||||
createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
|
populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
||||||
doc.valueOf("//oaf:hostedBy/@name"));
|
p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
|
||||||
|
p.setJournal(prepareJournal(doc, info));
|
||||||
final DataInfo info = prepareDataInfo(doc);
|
oafs.add(p);
|
||||||
final long lastUpdateTimestamp = new Date().getTime();
|
break;
|
||||||
|
case "dataset":
|
||||||
return createOafs(doc, type, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
final Dataset d = new Dataset();
|
||||||
} catch (final Exception e) {
|
populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
||||||
throw new RuntimeException(e);
|
d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
|
||||||
}
|
d.setStoragedate(prepareDatasetStorageDate(doc, info));
|
||||||
}
|
d.setDevice(prepareDatasetDevice(doc, info));
|
||||||
|
d.setSize(prepareDatasetSize(doc, info));
|
||||||
protected List<Oaf> createOafs(
|
d.setVersion(prepareDatasetVersion(doc, info));
|
||||||
final Document doc,
|
d.setLastmetadataupdate(prepareDatasetLastMetadataUpdate(doc, info));
|
||||||
final String type,
|
d.setMetadataversionnumber(prepareDatasetMetadataVersionNumber(doc, info));
|
||||||
final KeyValue collectedFrom,
|
d.setGeolocation(prepareDatasetGeoLocations(doc, info));
|
||||||
final KeyValue hostedBy,
|
oafs.add(d);
|
||||||
final DataInfo info,
|
break;
|
||||||
final long lastUpdateTimestamp) {
|
case "software":
|
||||||
|
final Software s = new Software();
|
||||||
final List<Oaf> oafs = new ArrayList<>();
|
populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
||||||
|
s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
|
||||||
switch (type.toLowerCase()) {
|
s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
|
||||||
case "":
|
s.setLicense(prepareSoftwareLicenses(doc, info));
|
||||||
case "publication":
|
s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
|
||||||
final Publication p = new Publication();
|
s.setProgrammingLanguage(prepareSoftwareProgrammingLanguage(doc, info));
|
||||||
populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
oafs.add(s);
|
||||||
p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER);
|
break;
|
||||||
p.setJournal(prepareJournal(doc, info));
|
case "otherresearchproducts":
|
||||||
oafs.add(p);
|
default:
|
||||||
break;
|
final OtherResearchProduct o = new OtherResearchProduct();
|
||||||
case "dataset":
|
populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
||||||
final Dataset d = new Dataset();
|
o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
|
||||||
populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
|
||||||
d.setResulttype(DATASET_RESULTTYPE_QUALIFIER);
|
o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
|
||||||
d.setStoragedate(prepareDatasetStorageDate(doc, info));
|
o.setTool(prepareOtherResearchProductTools(doc, info));
|
||||||
d.setDevice(prepareDatasetDevice(doc, info));
|
oafs.add(o);
|
||||||
d.setSize(prepareDatasetSize(doc, info));
|
break;
|
||||||
d.setVersion(prepareDatasetVersion(doc, info));
|
}
|
||||||
d.setLastmetadataupdate(prepareDatasetLastMetadataUpdate(doc, info));
|
|
||||||
d.setMetadataversionnumber(prepareDatasetMetadataVersionNumber(doc, info));
|
if (!oafs.isEmpty()) {
|
||||||
d.setGeolocation(prepareDatasetGeoLocations(doc, info));
|
oafs.addAll(addProjectRels(doc, collectedFrom, info, lastUpdateTimestamp));
|
||||||
oafs.add(d);
|
oafs.addAll(addOtherResultRels(doc, collectedFrom, info, lastUpdateTimestamp));
|
||||||
break;
|
}
|
||||||
case "software":
|
|
||||||
final Software s = new Software();
|
return oafs;
|
||||||
populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
}
|
||||||
s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER);
|
|
||||||
s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
|
private List<Oaf> addProjectRels(
|
||||||
s.setLicense(prepareSoftwareLicenses(doc, info));
|
final Document doc,
|
||||||
s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
|
final KeyValue collectedFrom,
|
||||||
s.setProgrammingLanguage(prepareSoftwareProgrammingLanguage(doc, info));
|
final DataInfo info,
|
||||||
oafs.add(s);
|
final long lastUpdateTimestamp) {
|
||||||
break;
|
|
||||||
case "otherresearchproducts":
|
final List<Oaf> res = new ArrayList<>();
|
||||||
default:
|
|
||||||
final OtherResearchProduct o = new OtherResearchProduct();
|
final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false);
|
||||||
populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
|
|
||||||
o.setResulttype(OTHER_RESULTTYPE_QUALIFIER);
|
for (final Object o : doc.selectNodes("//oaf:projectid")) {
|
||||||
o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
|
final String projectId = createOpenaireId(40, ((Node) o).getText(), true);
|
||||||
o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
|
|
||||||
o.setTool(prepareOtherResearchProductTools(doc, info));
|
final Relation r1 = new Relation();
|
||||||
oafs.add(o);
|
r1.setRelType("resultProject");
|
||||||
break;
|
r1.setSubRelType("outcome");
|
||||||
}
|
r1.setRelClass("isProducedBy");
|
||||||
|
r1.setSource(docId);
|
||||||
if (!oafs.isEmpty()) {
|
r1.setTarget(projectId);
|
||||||
oafs.addAll(addProjectRels(doc, collectedFrom, info, lastUpdateTimestamp));
|
r1.setCollectedfrom(Arrays.asList(collectedFrom));
|
||||||
oafs.addAll(addOtherResultRels(doc, collectedFrom, info, lastUpdateTimestamp));
|
r1.setDataInfo(info);
|
||||||
}
|
r1.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||||
|
res.add(r1);
|
||||||
return oafs;
|
|
||||||
}
|
final Relation r2 = new Relation();
|
||||||
|
r2.setRelType("resultProject");
|
||||||
private List<Oaf> addProjectRels(
|
r2.setSubRelType("outcome");
|
||||||
final Document doc,
|
r2.setRelClass("produces");
|
||||||
final KeyValue collectedFrom,
|
r2.setSource(projectId);
|
||||||
final DataInfo info,
|
r2.setTarget(docId);
|
||||||
final long lastUpdateTimestamp) {
|
r2.setCollectedfrom(Arrays.asList(collectedFrom));
|
||||||
|
r2.setDataInfo(info);
|
||||||
final List<Oaf> res = new ArrayList<>();
|
r2.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||||
|
res.add(r2);
|
||||||
final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false);
|
}
|
||||||
|
|
||||||
for (final Object o : doc.selectNodes("//oaf:projectid")) {
|
return res;
|
||||||
final String projectId = createOpenaireId(40, ((Node) o).getText(), true);
|
}
|
||||||
|
|
||||||
final Relation r1 = new Relation();
|
protected abstract List<Oaf> addOtherResultRels(
|
||||||
r1.setRelType("resultProject");
|
final Document doc,
|
||||||
r1.setSubRelType("outcome");
|
final KeyValue collectedFrom,
|
||||||
r1.setRelClass("isProducedBy");
|
final DataInfo info,
|
||||||
r1.setSource(docId);
|
final long lastUpdateTimestamp);
|
||||||
r1.setTarget(projectId);
|
|
||||||
r1.setCollectedfrom(Arrays.asList(collectedFrom));
|
private void populateResultFields(
|
||||||
r1.setDataInfo(info);
|
final Result r,
|
||||||
r1.setLastupdatetimestamp(lastUpdateTimestamp);
|
final Document doc,
|
||||||
res.add(r1);
|
final KeyValue collectedFrom,
|
||||||
|
final KeyValue hostedBy,
|
||||||
final Relation r2 = new Relation();
|
final DataInfo info,
|
||||||
r2.setRelType("resultProject");
|
final long lastUpdateTimestamp) {
|
||||||
r2.setSubRelType("outcome");
|
r.setDataInfo(info);
|
||||||
r2.setRelClass("produces");
|
r.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||||
r2.setSource(projectId);
|
r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
|
||||||
r2.setTarget(docId);
|
r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier")));
|
||||||
r2.setCollectedfrom(Arrays.asList(collectedFrom));
|
r.setCollectedfrom(Arrays.asList(collectedFrom));
|
||||||
r2.setDataInfo(info);
|
r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info));
|
||||||
r2.setLastupdatetimestamp(lastUpdateTimestamp);
|
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
|
||||||
res.add(r2);
|
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
|
||||||
}
|
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
|
r.setOaiprovenance(prepareOAIprovenance(doc));
|
||||||
return res;
|
r.setAuthor(prepareAuthors(doc, info));
|
||||||
}
|
r.setLanguage(prepareLanguages(doc));
|
||||||
|
r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
protected abstract List<Oaf> addOtherResultRels(
|
r.setSubject(prepareSubjects(doc, info));
|
||||||
final Document doc,
|
r.setTitle(prepareTitles(doc, info));
|
||||||
final KeyValue collectedFrom,
|
r.setRelevantdate(prepareRelevantDates(doc, info));
|
||||||
final DataInfo info,
|
r.setDescription(prepareDescriptions(doc, info));
|
||||||
final long lastUpdateTimestamp);
|
r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info));
|
||||||
|
r.setPublisher(preparePublisher(doc, info));
|
||||||
private void populateResultFields(
|
r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info));
|
||||||
final Result r,
|
r.setSource(prepareSources(doc, info));
|
||||||
final Document doc,
|
r.setFulltext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
final KeyValue collectedFrom,
|
r.setFormat(prepareFormats(doc, info));
|
||||||
final KeyValue hostedBy,
|
r.setContributor(prepareContributors(doc, info));
|
||||||
final DataInfo info,
|
r.setResourcetype(prepareResourceType(doc, info));
|
||||||
final long lastUpdateTimestamp) {
|
r.setCoverage(prepareCoverages(doc, info));
|
||||||
r.setDataInfo(info);
|
r.setContext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
r.setLastupdatetimestamp(lastUpdateTimestamp);
|
r.setExternalReference(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
|
r.setInstance(prepareInstances(doc, info, collectedFrom, hostedBy));
|
||||||
r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier")));
|
}
|
||||||
r.setCollectedfrom(Arrays.asList(collectedFrom));
|
|
||||||
r.setPid(
|
protected abstract Qualifier prepareResourceType(Document doc, DataInfo info);
|
||||||
prepareListStructProps(
|
|
||||||
doc,
|
protected abstract List<Instance> prepareInstances(
|
||||||
"//oaf:identifier",
|
Document doc,
|
||||||
"@identifierType",
|
DataInfo info,
|
||||||
"dnet:pid_types",
|
KeyValue collectedfrom,
|
||||||
"dnet:pid_types",
|
KeyValue hostedby);
|
||||||
info));
|
|
||||||
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
|
protected abstract List<Field<String>> prepareSources(Document doc, DataInfo info);
|
||||||
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
|
|
||||||
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
protected abstract List<StructuredProperty> prepareRelevantDates(Document doc, DataInfo info);
|
||||||
r.setOaiprovenance(prepareOAIprovenance(doc));
|
|
||||||
r.setAuthor(prepareAuthors(doc, info));
|
protected abstract List<Field<String>> prepareCoverages(Document doc, DataInfo info);
|
||||||
r.setLanguage(prepareLanguages(doc));
|
|
||||||
r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
protected abstract List<Field<String>> prepareContributors(Document doc, DataInfo info);
|
||||||
r.setSubject(prepareSubjects(doc, info));
|
|
||||||
r.setTitle(prepareTitles(doc, info));
|
protected abstract List<Field<String>> prepareFormats(Document doc, DataInfo info);
|
||||||
r.setRelevantdate(prepareRelevantDates(doc, info));
|
|
||||||
r.setDescription(prepareDescriptions(doc, info));
|
protected abstract Field<String> preparePublisher(Document doc, DataInfo info);
|
||||||
r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info));
|
|
||||||
r.setPublisher(preparePublisher(doc, info));
|
protected abstract List<Field<String>> prepareDescriptions(Document doc, DataInfo info);
|
||||||
r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info));
|
|
||||||
r.setSource(prepareSources(doc, info));
|
protected abstract List<StructuredProperty> prepareTitles(Document doc, DataInfo info);
|
||||||
r.setFulltext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
|
||||||
r.setFormat(prepareFormats(doc, info));
|
protected abstract List<StructuredProperty> prepareSubjects(Document doc, DataInfo info);
|
||||||
r.setContributor(prepareContributors(doc, info));
|
|
||||||
r.setResourcetype(prepareResourceType(doc, info));
|
protected abstract Qualifier prepareLanguages(Document doc);
|
||||||
r.setCoverage(prepareCoverages(doc, info));
|
|
||||||
r.setContext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
protected abstract List<Author> prepareAuthors(Document doc, DataInfo info);
|
||||||
r.setExternalReference(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
|
||||||
r.setInstance(prepareInstances(doc, info, collectedFrom, hostedBy));
|
protected abstract List<Field<String>> prepareOtherResearchProductTools(
|
||||||
}
|
Document doc,
|
||||||
|
DataInfo info);
|
||||||
protected abstract Qualifier prepareResourceType(Document doc, DataInfo info);
|
|
||||||
|
protected abstract List<Field<String>> prepareOtherResearchProductContactGroups(
|
||||||
protected abstract List<Instance> prepareInstances(
|
Document doc,
|
||||||
Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby);
|
DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareSources(Document doc, DataInfo info);
|
protected abstract List<Field<String>> prepareOtherResearchProductContactPersons(
|
||||||
|
Document doc,
|
||||||
protected abstract List<StructuredProperty> prepareRelevantDates(Document doc, DataInfo info);
|
DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareCoverages(Document doc, DataInfo info);
|
protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareContributors(Document doc, DataInfo info);
|
protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareFormats(Document doc, DataInfo info);
|
protected abstract List<StructuredProperty> prepareSoftwareLicenses(
|
||||||
|
Document doc,
|
||||||
protected abstract Field<String> preparePublisher(Document doc, DataInfo info);
|
DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareDescriptions(Document doc, DataInfo info);
|
protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
|
||||||
|
Document doc,
|
||||||
protected abstract List<StructuredProperty> prepareTitles(Document doc, DataInfo info);
|
DataInfo info);
|
||||||
|
|
||||||
protected abstract List<StructuredProperty> prepareSubjects(Document doc, DataInfo info);
|
protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract Qualifier prepareLanguages(Document doc);
|
protected abstract Field<String> prepareDatasetMetadataVersionNumber(
|
||||||
|
Document doc,
|
||||||
protected abstract List<Author> prepareAuthors(Document doc, DataInfo info);
|
DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareOtherResearchProductTools(
|
protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
|
||||||
Document doc, DataInfo info);
|
|
||||||
|
protected abstract Field<String> prepareDatasetVersion(Document doc, DataInfo info);
|
||||||
protected abstract List<Field<String>> prepareOtherResearchProductContactGroups(
|
|
||||||
Document doc, DataInfo info);
|
protected abstract Field<String> prepareDatasetSize(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareOtherResearchProductContactPersons(
|
protected abstract Field<String> prepareDatasetDevice(Document doc, DataInfo info);
|
||||||
Document doc, DataInfo info);
|
|
||||||
|
protected abstract Field<String> prepareDatasetStorageDate(Document doc, DataInfo info);
|
||||||
protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info);
|
|
||||||
|
private Journal prepareJournal(final Document doc, final DataInfo info) {
|
||||||
protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);
|
final Node n = doc.selectSingleNode("//oaf:journal");
|
||||||
|
if (n != null) {
|
||||||
protected abstract List<StructuredProperty> prepareSoftwareLicenses(
|
final String name = n.getText();
|
||||||
Document doc, DataInfo info);
|
final String issnPrinted = n.valueOf("@issn");
|
||||||
|
final String issnOnline = n.valueOf("@eissn");
|
||||||
protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
|
final String issnLinking = n.valueOf("@lissn");
|
||||||
Document doc, DataInfo info);
|
final String ep = n.valueOf("@ep");
|
||||||
|
final String iss = n.valueOf("@iss");
|
||||||
protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);
|
final String sp = n.valueOf("@sp");
|
||||||
|
final String vol = n.valueOf("@vol");
|
||||||
protected abstract Field<String> prepareDatasetMetadataVersionNumber(
|
final String edition = n.valueOf("@edition");
|
||||||
Document doc, DataInfo info);
|
if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, ep, iss, sp, vol, edition, null, null, info); }
|
||||||
|
}
|
||||||
protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
|
return null;
|
||||||
|
}
|
||||||
protected abstract Field<String> prepareDatasetVersion(Document doc, DataInfo info);
|
|
||||||
|
protected Qualifier prepareQualifier(
|
||||||
protected abstract Field<String> prepareDatasetSize(Document doc, DataInfo info);
|
final Node node,
|
||||||
|
final String xpath,
|
||||||
protected abstract Field<String> prepareDatasetDevice(Document doc, DataInfo info);
|
final String schemeId,
|
||||||
|
final String schemeName) {
|
||||||
protected abstract Field<String> prepareDatasetStorageDate(Document doc, DataInfo info);
|
final String classId = node.valueOf(xpath);
|
||||||
|
final String className = code2name.get(classId);
|
||||||
private Journal prepareJournal(final Document doc, final DataInfo info) {
|
return qualifier(classId, className, schemeId, schemeName);
|
||||||
final Node n = doc.selectSingleNode("//oaf:journal");
|
}
|
||||||
if (n != null) {
|
|
||||||
final String name = n.getText();
|
protected List<StructuredProperty> prepareListStructProps(
|
||||||
final String issnPrinted = n.valueOf("@issn");
|
final Node node,
|
||||||
final String issnOnline = n.valueOf("@eissn");
|
final String xpath,
|
||||||
final String issnLinking = n.valueOf("@lissn");
|
final String xpathClassId,
|
||||||
final String ep = n.valueOf("@ep");
|
final String schemeId,
|
||||||
final String iss = n.valueOf("@iss");
|
final String schemeName,
|
||||||
final String sp = n.valueOf("@sp");
|
final DataInfo info) {
|
||||||
final String vol = n.valueOf("@vol");
|
final List<StructuredProperty> res = new ArrayList<>();
|
||||||
final String edition = n.valueOf("@edition");
|
for (final Object o : node.selectNodes(xpath)) {
|
||||||
if (StringUtils.isNotBlank(name)) {
|
final Node n = (Node) o;
|
||||||
return journal(
|
final String classId = n.valueOf(xpathClassId);
|
||||||
name,
|
final String className = code2name.get(classId);
|
||||||
issnPrinted,
|
res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info));
|
||||||
issnOnline,
|
}
|
||||||
issnLinking,
|
return res;
|
||||||
ep,
|
}
|
||||||
iss,
|
|
||||||
sp,
|
protected List<StructuredProperty> prepareListStructProps(
|
||||||
vol,
|
final Node node,
|
||||||
edition,
|
final String xpath,
|
||||||
null,
|
final Qualifier qualifier,
|
||||||
null,
|
final DataInfo info) {
|
||||||
info);
|
final List<StructuredProperty> res = new ArrayList<>();
|
||||||
}
|
for (final Object o : node.selectNodes(xpath)) {
|
||||||
}
|
final Node n = (Node) o;
|
||||||
return null;
|
res.add(structuredProperty(n.getText(), qualifier, info));
|
||||||
}
|
}
|
||||||
|
return res;
|
||||||
protected Qualifier prepareQualifier(
|
}
|
||||||
final Node node, final String xpath, final String schemeId, final String schemeName) {
|
|
||||||
final String classId = node.valueOf(xpath);
|
protected List<StructuredProperty> prepareListStructProps(
|
||||||
final String className = code2name.get(classId);
|
final Node node,
|
||||||
return qualifier(classId, className, schemeId, schemeName);
|
final String xpath,
|
||||||
}
|
final DataInfo info) {
|
||||||
|
final List<StructuredProperty> res = new ArrayList<>();
|
||||||
protected List<StructuredProperty> prepareListStructProps(
|
for (final Object o : node.selectNodes(xpath)) {
|
||||||
final Node node,
|
final Node n = (Node) o;
|
||||||
final String xpath,
|
res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n.valueOf("@schemename"), info));
|
||||||
final String xpathClassId,
|
}
|
||||||
final String schemeId,
|
return res;
|
||||||
final String schemeName,
|
}
|
||||||
final DataInfo info) {
|
|
||||||
final List<StructuredProperty> res = new ArrayList<>();
|
protected OAIProvenance prepareOAIprovenance(final Document doc) {
|
||||||
for (final Object o : node.selectNodes(xpath)) {
|
final Node n =
|
||||||
final Node n = (Node) o;
|
doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']");
|
||||||
final String classId = n.valueOf(xpathClassId);
|
|
||||||
final String className = code2name.get(classId);
|
if (n == null) { return null; }
|
||||||
res.add(
|
|
||||||
structuredProperty(
|
final String identifier = n.valueOf("./*[local-name()='identifier']");
|
||||||
n.getText(), classId, className, schemeId, schemeName, info));
|
final String baseURL = n.valueOf("./*[local-name()='baseURL']");;
|
||||||
}
|
final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");;
|
||||||
return res;
|
final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true");
|
||||||
}
|
final String datestamp = n.valueOf("./*[local-name()='datestamp']");;
|
||||||
|
final String harvestDate = n.valueOf("@harvestDate");;
|
||||||
protected List<StructuredProperty> prepareListStructProps(
|
|
||||||
final Node node, final String xpath, final Qualifier qualifier, final DataInfo info) {
|
return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
||||||
final List<StructuredProperty> res = new ArrayList<>();
|
}
|
||||||
for (final Object o : node.selectNodes(xpath)) {
|
|
||||||
final Node n = (Node) o;
|
protected DataInfo prepareDataInfo(final Document doc) {
|
||||||
res.add(structuredProperty(n.getText(), qualifier, info));
|
final Node n = doc.selectSingleNode("//oaf:datainfo");
|
||||||
}
|
|
||||||
return res;
|
if (n == null) { return dataInfo(false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9"); }
|
||||||
}
|
|
||||||
|
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
|
||||||
protected List<StructuredProperty> prepareListStructProps(
|
final String paClassName = n.valueOf("./oaf:provenanceaction/@classname");
|
||||||
final Node node, final String xpath, final DataInfo info) {
|
final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
|
||||||
final List<StructuredProperty> res = new ArrayList<>();
|
final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");
|
||||||
for (final Object o : node.selectNodes(xpath)) {
|
|
||||||
final Node n = (Node) o;
|
final boolean deletedbyinference =
|
||||||
res.add(
|
Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
||||||
structuredProperty(
|
final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
|
||||||
n.getText(),
|
final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
|
||||||
n.valueOf("@classid"),
|
final String trust = n.valueOf("./oaf:trust");
|
||||||
n.valueOf("@classname"),
|
|
||||||
n.valueOf("@schemeid"),
|
return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust);
|
||||||
n.valueOf("@schemename"),
|
}
|
||||||
info));
|
|
||||||
}
|
protected Field<String> prepareField(final Node node, final String xpath, final DataInfo info) {
|
||||||
return res;
|
return field(node.valueOf(xpath), info);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected OAIProvenance prepareOAIprovenance(final Document doc) {
|
protected List<Field<String>> prepareListFields(
|
||||||
final Node n =
|
final Node node,
|
||||||
doc.selectSingleNode(
|
final String xpath,
|
||||||
"//*[local-name()='provenance']/*[local-name()='originDescription']");
|
final DataInfo info) {
|
||||||
|
return listFields(info, prepareListString(node, xpath));
|
||||||
if (n == null) {
|
}
|
||||||
return null;
|
|
||||||
}
|
protected List<String> prepareListString(final Node node, final String xpath) {
|
||||||
|
final List<String> res = new ArrayList<>();
|
||||||
final String identifier = n.valueOf("./*[local-name()='identifier']");
|
for (final Object o : node.selectNodes(xpath)) {
|
||||||
final String baseURL = n.valueOf("./*[local-name()='baseURL']");
|
final String s = ((Node) o).getText().trim();
|
||||||
;
|
if (StringUtils.isNotBlank(s)) {
|
||||||
final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");
|
res.add(s);
|
||||||
;
|
}
|
||||||
final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true");
|
}
|
||||||
final String datestamp = n.valueOf("./*[local-name()='datestamp']");
|
return res;
|
||||||
;
|
}
|
||||||
final String harvestDate = n.valueOf("@harvestDate");
|
|
||||||
;
|
|
||||||
|
|
||||||
return oaiIProvenance(
|
|
||||||
identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected DataInfo prepareDataInfo(final Document doc) {
|
|
||||||
final Node n = doc.selectSingleNode("//oaf:datainfo");
|
|
||||||
|
|
||||||
if (n == null) {
|
|
||||||
return dataInfo(false, null, false, false, REPOSITORY_QUALIFIER, "0.9");
|
|
||||||
}
|
|
||||||
|
|
||||||
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
|
|
||||||
final String paClassName = n.valueOf("./oaf:provenanceaction/@classname");
|
|
||||||
final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
|
|
||||||
final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");
|
|
||||||
|
|
||||||
final boolean deletedbyinference =
|
|
||||||
Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
|
||||||
final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
|
|
||||||
final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
|
|
||||||
final String trust = n.valueOf("./oaf:trust");
|
|
||||||
|
|
||||||
return dataInfo(
|
|
||||||
deletedbyinference,
|
|
||||||
inferenceprovenance,
|
|
||||||
inferred,
|
|
||||||
false,
|
|
||||||
qualifier(paClassId, paClassName, paSchemeId, paSchemeName),
|
|
||||||
trust);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected Field<String> prepareField(final Node node, final String xpath, final DataInfo info) {
|
|
||||||
return field(node.valueOf(xpath), info);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected List<Field<String>> prepareListFields(
|
|
||||||
final Node node, final String xpath, final DataInfo info) {
|
|
||||||
return listFields(info, prepareListString(node, xpath));
|
|
||||||
}
|
|
||||||
|
|
||||||
protected List<String> prepareListString(final Node node, final String xpath) {
|
|
||||||
final List<String> res = new ArrayList<>();
|
|
||||||
for (final Object o : node.selectNodes(xpath)) {
|
|
||||||
final String s = ((Node) o).getText().trim();
|
|
||||||
if (StringUtils.isNotBlank(s)) {
|
|
||||||
res.add(s);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
File diff suppressed because it is too large
@@ -0,0 +1,22 @@
+package eu.dnetlib.dhp.oa.graph.raw.common;
+
+import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
+
+import eu.dnetlib.dhp.schema.oaf.Qualifier;
+
+public class MigrationConstants {
+
+    public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
+            qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
+            qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
+            qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
+            qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
+    public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
+            qualifier("sysimport:crosswalk:repository", "sysimport:crosswalk:repository", "dnet:provenanceActions", "dnet:provenanceActions");
+    public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
+            qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenanceActions", "dnet:provenanceActions");
+
+}
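
Note: a minimal sketch of how these shared qualifiers are consumed when the mappers build result entities; the wrapper class and factory method below are illustrative, while Publication and setResulttype come from the dhp schema model used elsewhere in this commit:

import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
import eu.dnetlib.dhp.schema.oaf.Publication;

public class MigrationConstantsUsageSketch {
    // illustrative factory: a Publication carrying the shared result-type qualifier
    public static Publication newPublication() {
        final Publication p = new Publication();
        p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
        return p;
    }
}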
@@ -21,6 +21,28 @@
         <name>sparkExecutorCores</name>
         <description>number of cores used by single executor</description>
     </property>
+    <property>
+        <name>oozieActionShareLibForSpark2</name>
+        <description>oozie action sharelib for spark 2.*</description>
+    </property>
+    <property>
+        <name>spark2ExtraListeners</name>
+        <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+        <description>spark 2.* extra listeners classname</description>
+    </property>
+    <property>
+        <name>spark2SqlQueryExecutionListeners</name>
+        <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+        <description>spark 2.* sql query execution listeners classname</description>
+    </property>
+    <property>
+        <name>spark2YarnHistoryServerAddress</name>
+        <description>spark 2.* yarn history server address</description>
+    </property>
+    <property>
+        <name>spark2EventLogDir</name>
+        <description>spark 2.* event log dir location</description>
+    </property>
 </parameters>

 <global>
@@ -35,6 +57,10 @@
             <name>oozie.launcher.mapred.job.queue.name</name>
             <value>${oozieLauncherQueueName}</value>
         </property>
+        <property>
+            <name>oozie.action.sharelib.for.spark</name>
+            <value>${oozieActionShareLibForSpark2}</value>
+        </property>
     </configuration>
 </global>

@@ -52,14 +78,15 @@
             <class>eu.dnetlib.dhp.oa.graph.GraphHiveImporterJob</class>
             <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
             <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
+                --executor-memory=${sparkExecutorMemory}
+                --executor-cores=${sparkExecutorCores}
                 --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
-                --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
-                --conf spark.sql.warehouse.dir="/user/hive/warehouse"
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
             </spark-opts>
-            <arg>-mt</arg> <arg>yarn-cluster</arg>
             <arg>--sourcePath</arg><arg>${sourcePath}</arg>
             <arg>--hive_db_name</arg><arg>${hive_db_name}</arg>
             <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
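
Note: the parameterized spark-opts above correspond one-to-one to SparkConf properties; an illustrative equivalent in code, with placeholder literals standing in for the ${...} workflow parameters:

import org.apache.spark.SparkConf;

public class Spark2OptsSketch {
    public static SparkConf build() {
        return new SparkConf()
                .set("spark.extraListeners", "com.cloudera.spark.lineage.NavigatorAppListener")
                .set("spark.sql.queryExecutionListeners", "com.cloudera.spark.lineage.NavigatorQueryListener")
                .set("spark.yarn.historyServer.address", "http://history-server:18088") // placeholder
                .set("spark.eventLog.dir", "hdfs:///user/spark/applicationHistory") // placeholder
                .set("spark.sql.warehouse.dir", "/user/hive/warehouse"); // placeholder
    }
}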
@@ -11,11 +11,13 @@ import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
 import eu.dnetlib.dhp.oa.provision.model.SortableRelation;
 import eu.dnetlib.dhp.schema.common.EntityType;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.oaf.*;
+import java.util.List;
+import java.util.Objects;
 import java.util.Optional;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
@@ -104,16 +106,12 @@ public class CreateRelatedEntitiesJob_phase1 {
             SparkSession spark,
             String inputRelationsPath,
             String inputEntityPath,
-            Class<E> entityClazz,
+            Class<E> clazz,
             String outputPath) {

         Dataset<Tuple2<String, SortableRelation>> relsByTarget =
                 readPathRelation(spark, inputRelationsPath)
-                        .filter(
-                                (FilterFunction<SortableRelation>)
-                                        value ->
-                                                value.getDataInfo().getDeletedbyinference()
-                                                        == false)
+                        .filter("dataInfo.deletedbyinference == false")
                         .map(
                                 (MapFunction<SortableRelation, Tuple2<String, SortableRelation>>)
                                         r -> new Tuple2<>(r.getTarget(), r),
@@ -122,10 +120,11 @@ public class CreateRelatedEntitiesJob_phase1 {
                         .cache();

         Dataset<Tuple2<String, RelatedEntity>> entities =
-                readPathEntity(spark, inputEntityPath, entityClazz)
+                readPathEntity(spark, inputEntityPath, clazz)
+                        .filter("dataInfo.invisible == false")
                         .map(
                                 (MapFunction<E, RelatedEntity>)
-                                        value -> asRelatedEntity(value, entityClazz),
+                                        value -> asRelatedEntity(value, clazz),
                                 Encoders.bean(RelatedEntity.class))
                         .map(
                                 (MapFunction<RelatedEntity, Tuple2<String, RelatedEntity>>)
@@ -146,7 +145,7 @@ public class CreateRelatedEntitiesJob_phase1 {
                                 Encoders.bean(EntityRelEntity.class))
                 .write()
                 .mode(SaveMode.Overwrite)
-                .parquet(outputPath + "/" + EntityType.fromClass(entityClazz));
+                .parquet(outputPath + "/" + EntityType.fromClass(clazz));
     }

     private static <E extends OafEntity> Dataset<E> readPathEntity(
@@ -161,6 +160,81 @@ public class CreateRelatedEntitiesJob_phase1 {
                 Encoders.bean(entityClazz));
     }

+    public static <E extends OafEntity> RelatedEntity asRelatedEntity(E entity, Class<E> clazz) {
+
+        final RelatedEntity re = new RelatedEntity();
+        re.setId(entity.getId());
+        re.setType(EntityType.fromClass(clazz).name());
+
+        re.setPid(entity.getPid());
+        re.setCollectedfrom(entity.getCollectedfrom());
+
+        switch (EntityType.fromClass(clazz)) {
+            case publication:
+            case dataset:
+            case otherresearchproduct:
+            case software:
+                Result result = (Result) entity;
+
+                if (result.getTitle() != null && !result.getTitle().isEmpty()) {
+                    re.setTitle(result.getTitle().stream().findFirst().get());
+                }
+
+                re.setDateofacceptance(getValue(result.getDateofacceptance()));
+                re.setPublisher(getValue(result.getPublisher()));
+                re.setResulttype(result.getResulttype());
+                re.setInstances(result.getInstance());
+
+                // TODO still to be mapped
+                // re.setCodeRepositoryUrl(j.read("$.coderepositoryurl"));
+
+                break;
+            case datasource:
+                Datasource d = (Datasource) entity;
+
+                re.setOfficialname(getValue(d.getOfficialname()));
+                re.setWebsiteurl(getValue(d.getWebsiteurl()));
+                re.setDatasourcetype(d.getDatasourcetype());
+                re.setOpenairecompatibility(d.getOpenairecompatibility());
+
+                break;
+            case organization:
+                Organization o = (Organization) entity;
+
+                re.setLegalname(getValue(o.getLegalname()));
+                re.setLegalshortname(getValue(o.getLegalshortname()));
+                re.setCountry(o.getCountry());
+                re.setWebsiteurl(getValue(o.getWebsiteurl()));
+                break;
+            case project:
+                Project p = (Project) entity;
+
+                re.setProjectTitle(getValue(p.getTitle()));
+                re.setCode(getValue(p.getCode()));
+                re.setAcronym(getValue(p.getAcronym()));
+                re.setContracttype(p.getContracttype());
+
+                List<Field<String>> f = p.getFundingtree();
+                if (!f.isEmpty()) {
+                    re.setFundingtree(
+                            f.stream().map(s -> s.getValue()).collect(Collectors.toList()));
+                }
+                break;
+        }
+        return re;
+    }
+
+    private static String getValue(Field<String> field) {
+        return getFieldValueWithDefault(field, "");
+    }
+
+    private static <T> T getFieldValueWithDefault(Field<T> f, T defaultValue) {
+        return Optional.ofNullable(f)
+                .filter(Objects::nonNull)
+                .map(x -> x.getValue())
+                .orElse(defaultValue);
+    }
+
     /**
      * Reads a Dataset of eu.dnetlib.dhp.oa.provision.model.SortableRelation objects from a newline
      * delimited json text file,
@@ -76,9 +76,6 @@ public class PrepareRelationsJob {
         String outputPath = parser.get("outputPath");
         log.info("outputPath: {}", outputPath);

-        int numPartitions = Integer.parseInt(parser.get("relPartitions"));
-        log.info("relPartitions: {}", numPartitions);
-
         SparkConf conf = new SparkConf();

         runWithSparkSession(
@@ -86,27 +83,14 @@ public class PrepareRelationsJob {
                 isSparkSessionManaged,
                 spark -> {
                     removeOutputDir(spark, outputPath);
-                    prepareRelationsFromPaths(spark, inputRelationsPath, outputPath, numPartitions);
+                    prepareRelationsFromPaths(spark, inputRelationsPath, outputPath);
                 });
     }

     private static void prepareRelationsFromPaths(
-            SparkSession spark, String inputRelationsPath, String outputPath, int numPartitions) {
+            SparkSession spark, String inputRelationsPath, String outputPath) {
         readPathRelation(spark, inputRelationsPath)
-                .filter(
-                        (FilterFunction<SortableRelation>)
-                                r -> {
-                                    try {
-                                        return r != null
-                                                && r.getDataInfo() != null
-                                                && !r.getDataInfo().getDeletedbyinference();
-                                    } catch (NullPointerException e) {
-                                        log.info(
-                                                "invalid NPE '{}'",
-                                                OBJECT_MAPPER.writeValueAsString(r));
-                                        throw e;
-                                    }
-                                })
+                .filter("dataInfo.deletedbyinference == false")
                 .groupByKey(
                         (MapFunction<SortableRelation, String>) value -> value.getSource(),
                         Encoders.STRING())
@@ -114,7 +98,6 @@ public class PrepareRelationsJob {
                         (FlatMapGroupsFunction<String, SortableRelation, SortableRelation>)
                                 (key, values) -> Iterators.limit(values, MAX_RELS),
                         Encoders.bean(SortableRelation.class))
-                .repartition(numPartitions)
                 .write()
                 .mode(SaveMode.Overwrite)
                 .parquet(outputPath);
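
Note: the PrepareRelationsJob hunks above (like the CreateRelatedEntitiesJob_phase1 ones earlier) replace a typed FilterFunction that needed explicit null handling with Spark's SQL-expression filter, where a null dataInfo simply fails the predicate. A self-contained sketch contrasting the two styles; Rel and Info are illustrative stand-ins for SortableRelation and its DataInfo, not types from this repository:

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;

public class FilterStyleSketch {

    public static class Info {
        private boolean deletedbyinference;
        public boolean getDeletedbyinference() { return deletedbyinference; }
        public void setDeletedbyinference(boolean d) { this.deletedbyinference = d; }
    }

    public static class Rel {
        private Info dataInfo;
        public Info getDataInfo() { return dataInfo; }
        public void setDataInfo(Info dataInfo) { this.dataInfo = dataInfo; }
    }

    // SQL-expression style: rows with a null dataInfo evaluate to null and are dropped, no guard needed
    static Dataset<Rel> keepLive(Dataset<Rel> rels) {
        return rels.filter("dataInfo.deletedbyinference == false");
    }

    // typed style: the null check must be written out by hand
    static Dataset<Rel> keepLiveTyped(Dataset<Rel> rels) {
        return rels.filter(
                (FilterFunction<Rel>)
                        r -> r.getDataInfo() != null && !r.getDataInfo().getDeletedbyinference());
    }
}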
@@ -3,14 +3,8 @@ package eu.dnetlib.dhp.oa.provision.utils;
 import static org.apache.commons.lang3.StringUtils.substringAfter;

 import com.google.common.collect.Sets;
-import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
-import eu.dnetlib.dhp.schema.common.EntityType;
 import eu.dnetlib.dhp.schema.oaf.*;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
-import java.util.stream.Collectors;

 public class GraphMappingUtils {

@@ -18,81 +12,6 @@ public class GraphMappingUtils {

     public static Set<String> authorPidTypes = Sets.newHashSet("orcid", "magidentifier");

-    public static <E extends OafEntity> RelatedEntity asRelatedEntity(E entity, Class<E> clazz) {
-
-        final RelatedEntity re = new RelatedEntity();
-        re.setId(entity.getId());
-        re.setType(EntityType.fromClass(clazz).name());
-
-        re.setPid(entity.getPid());
-        re.setCollectedfrom(entity.getCollectedfrom());
-
-        switch (EntityType.fromClass(clazz)) {
-            case publication:
-            case dataset:
-            case otherresearchproduct:
-            case software:
-                Result result = (Result) entity;
-
-                if (result.getTitle() == null && !result.getTitle().isEmpty()) {
-                    re.setTitle(result.getTitle().stream().findFirst().get());
-                }
-
-                re.setDateofacceptance(getValue(result.getDateofacceptance()));
-                re.setPublisher(getValue(result.getPublisher()));
-                re.setResulttype(result.getResulttype());
-                re.setInstances(result.getInstance());
-
-                // TODO still to be mapped
-                // re.setCodeRepositoryUrl(j.read("$.coderepositoryurl"));
-
-                break;
-            case datasource:
-                Datasource d = (Datasource) entity;
-
-                re.setOfficialname(getValue(d.getOfficialname()));
-                re.setWebsiteurl(getValue(d.getWebsiteurl()));
-                re.setDatasourcetype(d.getDatasourcetype());
-                re.setOpenairecompatibility(d.getOpenairecompatibility());
-
-                break;
-            case organization:
-                Organization o = (Organization) entity;
-
-                re.setLegalname(getValue(o.getLegalname()));
-                re.setLegalshortname(getValue(o.getLegalshortname()));
-                re.setCountry(o.getCountry());
-                re.setWebsiteurl(getValue(o.getWebsiteurl()));
-                break;
-            case project:
-                Project p = (Project) entity;
-
-                re.setProjectTitle(getValue(p.getTitle()));
-                re.setCode(getValue(p.getCode()));
-                re.setAcronym(getValue(p.getAcronym()));
-                re.setContracttype(p.getContracttype());
-
-                List<Field<String>> f = p.getFundingtree();
-                if (!f.isEmpty()) {
-                    re.setFundingtree(
-                            f.stream().map(s -> s.getValue()).collect(Collectors.toList()));
-                }
-                break;
-        }
-        return re;
-    }
-
-    private static String getValue(Field<String> field) {
-        return getFieldValueWithDefault(field, "");
-    }
-
-    private static <T> T getFieldValueWithDefault(Field<T> f, T defaultValue) {
-        return Optional.ofNullable(f)
-                .filter(Objects::nonNull)
-                .map(x -> x.getValue())
-                .orElse(defaultValue);
-    }
-
     public static String removePrefix(final String s) {
         if (s.contains("|")) return substringAfter(s, "|");
         return s;
@@ -98,6 +98,7 @@
                 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.shuffle.partitions=3840
             </spark-opts>
             <arg>--inputRelationsPath</arg><arg>${inputGraphRootPath}/relation</arg>
             <arg>--outputPath</arg><arg>${workingDir}/relation</arg>
@@ -261,7 +261,8 @@
                         sparkDriverMemory,sparkExecutorMemory,sparkExecutorCores,
                         oozie.wf.application.path,projectVersion,oozie.use.system.libpath,
                         oozieActionShareLibForSpark1,spark1YarnHistoryServerAddress,spark1EventLogDir,
-                        oozieActionShareLibForSpark2,spark2YarnHistoryServerAddress,spark2EventLogDir
+                        oozieActionShareLibForSpark2,spark2YarnHistoryServerAddress,spark2EventLogDir,
+                        sparkSqlWarehouseDir
                     </include>
                     <includeSystemProperties>true</includeSystemProperties>
                     <includePropertyKeysFromFiles>