Prepare actionsets for BIP affiliations

This commit is contained in:
Serafeim Chatzopoulos 2023-07-06 15:56:12 +03:00
parent 347a889b20
commit bbc245696e
1 changed file with 91 additions and 79 deletions

View File

@ -5,6 +5,8 @@ import static eu.dnetlib.dhp.actionmanager.Constants.*;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable; import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -12,10 +14,16 @@ import java.util.stream.Stream;
import eu.dnetlib.dhp.actionmanager.Constants; import eu.dnetlib.dhp.actionmanager.Constants;
import eu.dnetlib.dhp.actionmanager.bipaffiliations.model.*; import eu.dnetlib.dhp.actionmanager.bipaffiliations.model.*;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.spark.SparkConf; import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders; import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row; import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.SparkSession;
@ -31,6 +39,7 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*; import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils; import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import scala.Tuple2;
/** /**
* created the Atomic Action for each type of results * created the Atomic Action for each type of results
@ -40,6 +49,12 @@ public class PrepareAffiliationRelations implements Serializable {
private static final String DOI = "doi"; private static final String DOI = "doi";
private static final Logger log = LoggerFactory.getLogger(PrepareAffiliationRelations.class); private static final Logger log = LoggerFactory.getLogger(PrepareAffiliationRelations.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final String ID_PREFIX = "50|doi_________::";
private static final String TRUST = "0.91";
public static final String BIP_AFFILIATIONS_CLASSID = "sysimport:crosswalk:bipaffiliations";
public static final String BIP_AFFILIATIONS_CLASSNAME = "Imported from BIP! Affiliations";
public static <I extends Result> void main(String[] args) throws Exception { public static <I extends Result> void main(String[] args) throws Exception {
@ -76,93 +91,90 @@ public class PrepareAffiliationRelations implements Serializable {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<AffiliationRelationDeserializer> bipDeserializeJavaRDD = sc JavaRDD<AffiliationRelationDeserializer> affiliationRelationsDeserializeRDD = sc
.textFile(inputPath) .textFile(inputPath)
.map(item -> OBJECT_MAPPER.readValue(item, AffiliationRelationDeserializer.class)); .map(item -> OBJECT_MAPPER.readValue(item, AffiliationRelationDeserializer.class));
// for(AffiliationRelationDeserializer rel: bipDeserializeJavaRDD.collect()){ // for(AffiliationRelationDeserializer rel: bipDeserializeJavaRDD.collect()){
// System.out.println(rel); // System.out.println(rel);
// } // }
JavaRDD<AffiliationRelationModel> affiliationRelations =
bipDeserializeJavaRDD.flatMap(entry ->
entry.getMatchings().stream().flatMap(matching ->
matching.getRorId().stream().map( rorId -> new AffiliationRelationModel(
entry.getDoi(),
rorId,
matching.getConfidence()
))).collect(Collectors.toList()).iterator());
for(AffiliationRelationModel rel: affiliationRelations.collect()){ Dataset<AffiliationRelationModel> affiliationRelations =
System.out.println(rel); spark.createDataset(
} affiliationRelationsDeserializeRDD.flatMap(entry ->
// Dataset<AffiliationRelationModel> relations = spark entry.getMatchings().stream().flatMap(matching ->
// .createDataset(bipDeserializeJavaRDD.flatMap(entry -> { matching.getRorId().stream().map( rorId -> new AffiliationRelationModel(
//// entry.keySet().stream().map(key -> { entry.getDoi(),
// AffiliationRelationModel rel = new AffiliationRelationModel(entry.getDoi()) rorId,
// System.out.println(entry); matching.getConfidence()
// return entry; ))).collect(Collectors.toList())
//// BipScore bs = new BipScore(); .iterator())
//// bs.setId(key); .rdd(),
//// bs.setScoreList(entry.get(key)); Encoders.bean(AffiliationRelationModel.class));
//// return bs;
// }).collect(Collectors.toList()).iterator()).rdd(), Encoders.bean(AffiliationRelationModel.class));
// bipScores affiliationRelations
// .map((MapFunction<AffiliationRelationModel, Relation>) affRel -> {
// .map((MapFunction<BipScore, Result>) bs -> {
// Result ret = new Result(); String paperId = ID_PREFIX
// + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", affRel.getDoi()));
// ret.setId(bs.getId()); final String affId = ID_PREFIX
// + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("ror", affRel.getRorId()));
// ret.setMeasures(getMeasure(bs));
// return getRelation(paperId, affId, ModelConstants.HAS_AUTHOR_INSTITUTION);
// return ret;
// }, Encoders.bean(Result.class)) }, Encoders.bean(Relation.class))
// .toJavaRDD() .toJavaRDD()
// .map(p -> new AtomicAction(Result.class, p)) .map(p -> new AtomicAction(Relation.class, p))
// .mapToPair( .mapToPair(
// aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()), aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
// new Text(OBJECT_MAPPER.writeValueAsString(aa)))) new Text(OBJECT_MAPPER.writeValueAsString(aa))))
// .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class); .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
// }
//
// private static List<Measure> getMeasure(BipScore value) {
// return value
// .getScoreList()
// .stream()
// .map(score -> {
// Measure m = new Measure();
// m.setId(score.getId());
// m
// .setUnit(
// score
// .getUnit()
// .stream()
// .map(unit -> {
// KeyValue kv = new KeyValue();
// kv.setValue(unit.getValue());
// kv.setKey(unit.getKey());
// kv
// .setDataInfo(
// OafMapperUtils
// .dataInfo(
// false,
// UPDATE_DATA_INFO_TYPE,
// true,
// false,
// OafMapperUtils
// .qualifier(
// UPDATE_MEASURE_BIP_CLASS_ID,
// UPDATE_CLASS_NAME,
// ModelConstants.DNET_PROVENANCE_ACTIONS,
// ModelConstants.DNET_PROVENANCE_ACTIONS),
// ""));
// return kv;
// })
// .collect(Collectors.toList()));
// return m;
// })
// .collect(Collectors.toList());
} }
/**
 * Builds an affiliation {@link Relation} linking a result to an organisation.
 *
 * @param source   OpenAIRE identifier of the source entity (the publication)
 * @param target   OpenAIRE identifier of the target entity (the organisation)
 * @param relclass relation class, e.g. {@code ModelConstants.HAS_AUTHOR_INSTITUTION}
 * @return a relation of type result/organization, sub-type affiliation, carrying
 *         the BIP! Affiliations provenance information
 */
public static Relation getRelation(String source, String target, String relclass) {
	final Relation relation = new Relation();
	relation.setSource(source);
	relation.setTarget(target);
	relation.setRelClass(relclass);
	// fixed typing for all affiliation relations produced by this action set
	relation.setRelType(ModelConstants.RESULT_ORGANIZATION);
	relation.setSubRelType(ModelConstants.AFFILIATION);
	relation.setCollectedfrom(getCollectedFrom());
	relation.setDataInfo(getDataInfo());
	return relation;
}
/**
 * Returns the "collected from" provenance attached to the generated relations.
 * <p>
 * NOTE(review): both the key and the value are set to
 * {@code ModelConstants.DNET_PROVENANCE_ACTIONS} — confirm this is intended rather
 * than a datasource id/name pair.
 *
 * @return a single-element list holding the provenance key/value
 */
public static List<KeyValue> getCollectedFrom() {
	final KeyValue provenance = new KeyValue();
	provenance.setKey(ModelConstants.DNET_PROVENANCE_ACTIONS);
	provenance.setValue(ModelConstants.DNET_PROVENANCE_ACTIONS);
	return Collections.singletonList(provenance);
}
/**
 * Builds the {@link DataInfo} describing how the BIP! affiliation relations were
 * produced: not inferred, not deleted by inference, with the fixed {@code TRUST}
 * level and the BIP! Affiliations provenance action qualifier.
 *
 * @return the data-info record shared by all relations created by this job
 */
public static DataInfo getDataInfo() {
	final DataInfo dataInfo = new DataInfo();
	dataInfo.setInferred(false);
	dataInfo.setDeletedbyinference(false);
	dataInfo.setTrust(TRUST);
	final Qualifier provenanceAction = getQualifier(
		BIP_AFFILIATIONS_CLASSID,
		BIP_AFFILIATIONS_CLASSNAME,
		ModelConstants.DNET_PROVENANCE_ACTIONS);
	dataInfo.setProvenanceaction(provenanceAction);
	return dataInfo;
}
/**
 * Creates a {@link Qualifier} with the given class id/name, using the same schema
 * identifier for both {@code schemeid} and {@code schemename}.
 *
 * @param classId         the qualifier class identifier
 * @param className       the human-readable qualifier class name
 * @param qualifierSchema the schema used for both scheme id and scheme name
 * @return the populated qualifier
 */
public static Qualifier getQualifier(String classId, String className,
	String qualifierSchema) {
	// Renamed parameters from snake_case (class_id/class_name) to Java camelCase;
	// parameter names are not part of the binary interface, so callers are unaffected.
	final Qualifier qualifier = new Qualifier();
	qualifier.setClassid(classId);
	qualifier.setClassname(className);
	qualifier.setSchemeid(qualifierSchema);
	qualifier.setSchemename(qualifierSchema);
	return qualifier;
}
} }