extended existing code to import POCI from OpenCitations #340

@@ -12,6 +12,7 @@ import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.MapFunction;
@@ -26,9 +27,12 @@ import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import eu.dnetlib.dhp.utils.DHPUtils;
 import scala.Tuple2;
 
 public class CreateActionSetSparkJob implements Serializable {
@@ -68,9 +72,6 @@ public class CreateActionSetSparkJob implements Serializable {
         final String outputPath = parser.get("outputPath");
         log.info("outputPath {}", outputPath);
 
-        final String prefix = parser.get("prefix");
-        log.info("prefix {}", prefix);
-
         final boolean shouldDuplicateRels = Optional
             .ofNullable(parser.get("shouldDuplicateRels"))
             .map(Boolean::valueOf)
@@ -81,21 +82,31 @@ public class CreateActionSetSparkJob implements Serializable {
             conf,
             isSparkSessionManaged,
             spark -> {
-                extractContent(spark, inputPath, outputPath, shouldDuplicateRels, prefix);
+                extractContent(spark, inputPath, outputPath, shouldDuplicateRels);
             });
 
     }
 
     private static void extractContent(SparkSession spark, String inputPath, String outputPath,
+        boolean shouldDuplicateRels) {
+
+        getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, "COCI")
+            .union(getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, "POCI"))
+            .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
+
+    }
+
+    private static JavaPairRDD<Text, Text> getTextTextJavaPairRDD(SparkSession spark, String inputPath,
         boolean shouldDuplicateRels, String prefix) {
-        spark
+        return spark
             .read()
-            .textFile(inputPath + "/*")
+            .textFile(inputPath + "/" + prefix + "/" + prefix + "_JSON/*")
             .map(
                 (MapFunction<String, COCI>) value -> OBJECT_MAPPER.readValue(value, COCI.class),
                 Encoders.bean(COCI.class))
             .flatMap(
-                (FlatMapFunction<COCI, Relation>) value -> createRelation(value, shouldDuplicateRels, prefix)
+                (FlatMapFunction<COCI, Relation>) value -> createRelation(
+                    value, shouldDuplicateRels, prefix)
                     .iterator(),
                 Encoders.bean(Relation.class))
             .filter((FilterFunction<Relation>) value -> value != null)
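
For context on the hunk above: the read path is now parameterised per dump, so the same job ingests both the COCI (DOI-based) and POCI (PMID-based) citation dumps and unions them before writing. Below is a minimal standalone sketch of that union-per-prefix pattern, assuming a local SparkSession and the `<inputPath>/<PREFIX>/<PREFIX>_JSON/*` layout used above; the class and app names are illustrative, not part of the PR:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SparkSession;

public class PrefixUnionSketch {

    // Mirrors the per-prefix read introduced above:
    // each dump lives under <inputPath>/<prefix>/<prefix>_JSON/*.
    static Dataset<String> readDump(SparkSession spark, String inputPath, String prefix) {
        return spark.read().textFile(inputPath + "/" + prefix + "/" + prefix + "_JSON/*");
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("PrefixUnionSketch").getOrCreate();
        // Union the two dumps exactly as extractContent unions the two pair RDDs.
        Dataset<String> all = readDump(spark, args[0], "COCI").union(readDump(spark, args[0], "POCI"));
        System.out.println("citation records read: " + all.count());
        spark.stop();
    }
}
```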
@@ -103,25 +114,30 @@ public class CreateActionSetSparkJob implements Serializable {
             .map(p -> new AtomicAction(p.getClass(), p))
             .mapToPair(
                 aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
-                    new Text(OBJECT_MAPPER.writeValueAsString(aa))))
-            .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
+                    new Text(OBJECT_MAPPER.writeValueAsString(aa))));
 
     }
 
     private static List<Relation> createRelation(COCI value, boolean duplicate, String p) {
 
         List<Relation> relationList = new ArrayList<>();
         String prefix;
+        String citing;
+        String cited;
         if (p.equals("COCI")) {
             prefix = DOI_PREFIX;
+            citing = prefix
+                + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCiting()));
+            cited = prefix
+                + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCited()));
+
         } else {
             prefix = PMID_PREFIX;
-        }
-
-        String citing = prefix
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCiting()));
-        final String cited = prefix
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCited()));
+            citing = prefix
+                + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("pmid", value.getCiting()));
+            cited = prefix
+                + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("pmid", value.getCited()));
+        }
 
         if (!citing.equals(cited)) {
             relationList
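
The reworked `createRelation` above builds OpenAIRE result identifiers as `prefix + md5(normalized pid)`, switching between the DOI and PMID prefix and normalizer per dump. A self-contained sketch of that identifier shape, with a plain MD5 helper standing in for `IdentifierFactory.md5` and a lower-case/trim standing in for `CleaningFunctions.normalizePidValue`; the prefix values are assumptions for illustration, not taken from this diff:

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class CitationIdSketch {

    // Stand-in for IdentifierFactory.md5: hex-encoded MD5 digest of the normalized pid.
    static String md5(String s) throws Exception {
        StringBuilder hex = new StringBuilder();
        for (byte b : MessageDigest.getInstance("MD5").digest(s.getBytes(StandardCharsets.UTF_8))) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws Exception {
        // Assumed shapes of DOI_PREFIX / PMID_PREFIX; the real constants live elsewhere in the class.
        String doiPrefix = "50|doi_________::";
        String pmidPrefix = "50|pmid________::";
        // Lower-case/trim stands in for CleaningFunctions.normalizePidValue.
        String citing = doiPrefix + md5("10.1162/qss_a_00023".toLowerCase().trim());
        String cited = pmidPrefix + md5("32637341".trim());
        System.out.println(citing + " cites " + cited);
    }
}
```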
@@ -143,59 +159,30 @@ public class CreateActionSetSparkJob implements Serializable {
         return relationList;
     }
 
-    private static Collection<Relation> getRelations(String citing, String cited) {
-
-        return Arrays
-            .asList(
-                getRelation(citing, cited, ModelConstants.CITES),
-                getRelation(cited, citing, ModelConstants.IS_CITED_BY));
-    }
-
     public static Relation getRelation(
         String source,
         String target,
         String relclass) {
-        Relation r = new Relation();
-        r.setCollectedfrom(getCollectedFrom());
-        r.setSource(source);
-        r.setTarget(target);
-        r.setRelClass(relclass);
-        r.setRelType(ModelConstants.RESULT_RESULT);
-        r.setSubRelType(ModelConstants.CITATION);
-        r
-            .setDataInfo(
-                getDataInfo());
-        return r;
-    }
-
-    public static List<KeyValue> getCollectedFrom() {
-        KeyValue kv = new KeyValue();
-        kv.setKey(ModelConstants.OPENOCITATIONS_ID);
-        kv.setValue(ModelConstants.OPENOCITATIONS_NAME);
-
-        return Arrays.asList(kv);
-    }
-
-    public static DataInfo getDataInfo() {
-        DataInfo di = new DataInfo();
-        di.setInferred(false);
-        di.setDeletedbyinference(false);
-        di.setTrust(TRUST);
-
-        di
-            .setProvenanceaction(
-                getQualifier(OPENCITATIONS_CLASSID, OPENCITATIONS_CLASSNAME, ModelConstants.DNET_PROVENANCE_ACTIONS));
-        return di;
-    }
-
-    public static Qualifier getQualifier(String class_id, String class_name,
-        String qualifierSchema) {
-        Qualifier pa = new Qualifier();
-        pa.setClassid(class_id);
-        pa.setClassname(class_name);
-        pa.setSchemeid(qualifierSchema);
-        pa.setSchemename(qualifierSchema);
-        return pa;
-    }
+
+        return OafMapperUtils
+            .getRelation(
+                source,
+                target,
+                ModelConstants.RESULT_RESULT,
+                ModelConstants.CITATION,
+                relclass,
+                Arrays
+                    .asList(
+                        OafMapperUtils.keyValue(ModelConstants.OPENOCITATIONS_ID, ModelConstants.OPENOCITATIONS_NAME)),
+                OafMapperUtils
+                    .dataInfo(
+                        false, null, false, false,
+                        OafMapperUtils
+                            .qualifier(
+                                OPENCITATIONS_CLASSID, OPENCITATIONS_CLASSNAME,
+                                ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+                        TRUST),
+                null);
+    }
 
 }
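
Downstream of the refactor above, every surviving `Relation` is wrapped in an `AtomicAction` and persisted as a `(class name, JSON)` pair in a Hadoop sequence file. A rough sketch of that persistence step in isolation, using the same Hadoop/Spark APIs as the PR; the payloads here are toy JSON strings, not real `Relation` serializations:

```java
import java.util.Arrays;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;

public class ActionSetWriteSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("ActionSetWriteSketch").getOrCreate();
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
        // One (class name, payload) Text pair per action, as in the .mapToPair(...) earlier in this diff.
        JavaPairRDD<Text, Text> actions = jsc
            .parallelize(Arrays.asList("{\"relClass\":\"Cites\"}", "{\"relClass\":\"IsCitedBy\"}"))
            .mapToPair(json -> new Tuple2<>(new Text("eu.dnetlib.dhp.schema.oaf.Relation"), new Text(json)));
        actions.saveAsHadoopFile(args[0], Text.class, Text.class, SequenceFileOutputFormat.class);
        spark.stop();
    }
}
```

The Oozie workflow hunk below makes the matching configuration change: `--inputPath` now points at `${workingPath}` and the job composes the `<prefix>/<prefix>_JSON` suffix itself.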
|
|
@@ -110,7 +110,7 @@
             --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
         </spark-opts>
-        <arg>--inputPath</arg><arg>${workingPath}/${prefix}_JSON</arg>
+        <arg>--inputPath</arg><arg>${workingPath}</arg>
         <arg>--outputPath</arg><arg>${outputPath}</arg>
         <arg>--prefix</arg><arg>${prefix}</arg>
     </spark>
|
|