1
0
Fork 0

Joining entities using the T x R x S method with groupByKey. WIP: reducing the memory footprint of the target objects (T).

This commit is contained in:
Claudio Atzori 2020-01-24 08:17:53 +01:00
parent 799929c1e3
commit a55f5fecc6
9 changed files with 414 additions and 50 deletions

View File

@ -51,5 +51,4 @@ public class EntityRelEntity implements Serializable {
this.target = target; this.target = target;
return this; return this;
} }
} }

View File

@ -3,7 +3,9 @@ package eu.dnetlib.dhp.graph;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.JsonPath;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaPairRDD;
@ -24,15 +26,13 @@ public class GraphJoiner implements Serializable {
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
final String entityIdPath = "$.id"; JavaPairRDD<String, TypedRow> datasource = readPathEntity(sc, inputPath, "datasource");
JavaPairRDD<String, TypedRow> organization = readPathEntity(sc, inputPath, "organization");
JavaPairRDD<String, TypedRow> datasource = readPathEntity(sc, entityIdPath, inputPath, "datasource"); JavaPairRDD<String, TypedRow> project = readPathEntity(sc, inputPath, "project");
JavaPairRDD<String, TypedRow> organization = readPathEntity(sc, entityIdPath, inputPath, "organization"); JavaPairRDD<String, TypedRow> dataset = readPathEntity(sc, inputPath, "dataset");
JavaPairRDD<String, TypedRow> project = readPathEntity(sc, entityIdPath, inputPath, "project"); JavaPairRDD<String, TypedRow> otherresearchproduct = readPathEntity(sc, inputPath, "otherresearchproduct");
JavaPairRDD<String, TypedRow> dataset = readPathEntity(sc, entityIdPath, inputPath, "dataset"); JavaPairRDD<String, TypedRow> software = readPathEntity(sc, inputPath, "software");
JavaPairRDD<String, TypedRow> otherresearchproduct = readPathEntity(sc, entityIdPath, inputPath, "otherresearchproduct"); JavaPairRDD<String, TypedRow> publication = readPathEntity(sc, inputPath, "publication");
JavaPairRDD<String, TypedRow> software = readPathEntity(sc, entityIdPath, inputPath, "software");
JavaPairRDD<String, TypedRow> publication = readPathEntity(sc, entityIdPath, inputPath, "publication");
final String entitiesPath = outPath + "/entities"; final String entitiesPath = outPath + "/entities";
datasource datasource
@ -48,28 +48,31 @@ public class GraphJoiner implements Serializable {
JavaPairRDD<String, EntityRelEntity> entities = sc.textFile(entitiesPath) JavaPairRDD<String, EntityRelEntity> entities = sc.textFile(entitiesPath)
.map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class))
.mapToPair(t -> new Tuple2<>(t.getSource().getSource(), t)); .mapToPair(t -> new Tuple2<>(t.getSource().getSourceId(), t));
final JavaPairRDD<String, EntityRelEntity> relation = readPathRelation(sc, inputPath) final JavaPairRDD<String, EntityRelEntity> relation = readPathRelation(sc, inputPath)
.filter(r -> !r.getDeleted())
.map(p -> new EntityRelEntity().setRelation(p)) .map(p -> new EntityRelEntity().setRelation(p))
.mapToPair(p -> new Tuple2<>(p.getRelation().getSource(), p)) .mapToPair(p -> new Tuple2<>(p.getRelation().getSourceId(), p))
.groupByKey() .groupByKey()
.map(p -> Iterables.limit(p._2(), MAX_RELS)) .map(p -> Iterables.limit(p._2(), MAX_RELS))
.flatMap(p -> p.iterator()) .flatMap(p -> p.iterator())
.mapToPair(p -> new Tuple2<>(p.getRelation().getTarget(), p)); .mapToPair(p -> new Tuple2<>(p.getRelation().getTargetId(), p));
final String joinByTargetPath = outPath + "/join_by_target"; final String joinByTargetPath = outPath + "/join_by_target";
relation.join(entities) relation
.join(entities
.filter(e -> !e._2().getSource().getDeleted())
/*.mapToPair(e -> new Tuple2<>(e._1(), new MappingUtils().pruneModel(e._2())))*/)
.map(s -> new EntityRelEntity() .map(s -> new EntityRelEntity()
.setRelation(s._2()._1().getRelation()) .setRelation(s._2()._1().getRelation())
.setTarget(s._2()._2().getSource())) .setTarget(s._2()._2().getSource()))
.map(e -> new ObjectMapper().writeValueAsString(e)) .map(e -> new ObjectMapper().writeValueAsString(e))
.saveAsTextFile(joinByTargetPath, GzipCodec.class); .saveAsTextFile(joinByTargetPath, GzipCodec.class);
JavaPairRDD<String, EntityRelEntity> bySource = sc.textFile(joinByTargetPath) JavaPairRDD<String, EntityRelEntity> bySource = sc.textFile(joinByTargetPath)
.map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class))
.mapToPair(t -> new Tuple2<>(t.getRelation().getSource(), t)); .mapToPair(t -> new Tuple2<>(t.getRelation().getSourceId(), t));
entities entities
.union(bySource) .union(bySource)
@ -97,12 +100,17 @@ public class GraphJoiner implements Serializable {
.saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class);
} }
private JavaPairRDD<String, TypedRow> readPathEntity(final JavaSparkContext sc, final String idPath, final String inputPath, final String type) { private JavaPairRDD<String, TypedRow> readPathEntity(final JavaSparkContext sc, final String inputPath, final String type) {
return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class) return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class)
.mapToPair((PairFunction<Tuple2<Text, Text>, String, TypedRow>) item -> { .mapToPair((PairFunction<Tuple2<Text, Text>, String, TypedRow>) item -> {
final String json = item._2().toString(); final String json = item._2().toString();
final String id = JsonPath.read(json, idPath); final String id = JsonPath.read(json, "$.id");
return new Tuple2<>(id, new TypedRow(id, type, json)); return new Tuple2<>(id, new TypedRow()
.setSourceId(id)
.setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference"))
.setType(type)
.setOaf(json));
}); });
} }
@ -110,9 +118,12 @@ public class GraphJoiner implements Serializable {
return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class)
.map(item -> { .map(item -> {
final String json = item._2().toString(); final String json = item._2().toString();
final String source = JsonPath.read(json, "$.source"); return new TypedRow()
final String target = JsonPath.read(json, "$.target"); .setSourceId(JsonPath.read(json, "$.source"))
return new TypedRow(source, target, "relation", json); .setTargetId(JsonPath.read(json, "$.target"))
.setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference"))
.setType("relation")
.setOaf(json);
}); });
} }

View File

@ -0,0 +1,103 @@
package eu.dnetlib.dhp.graph;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
import net.minidev.json.JSONArray;
import java.util.LinkedHashMap;
import java.util.stream.Collectors;
public class MappingUtils {

    // Jackson's ObjectMapper is thread-safe once configured and expensive to
    // construct; share one instance instead of allocating a new one per call.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /**
     * Projects the full OAF payload carried by the source {@link TypedRow} onto a
     * compact {@link RelatedEntity}, keeping only the fields needed downstream,
     * and returns a new {@link EntityRelEntity} whose source wraps the serialized
     * RelatedEntity. Used to shrink the memory footprint of joined target rows.
     *
     * @param e the entity whose source payload is pruned; its type field selects
     *          which subset of fields is extracted
     * @return a new EntityRelEntity carrying the pruned, re-serialized payload
     * @throws JsonProcessingException if the pruned model cannot be serialized
     */
    public EntityRelEntity pruneModel(EntityRelEntity e) throws JsonProcessingException {

        final DocumentContext j = JsonPath.parse(e.getSource().getOaf());
        final RelatedEntity re = new RelatedEntity();

        switch (e.getSource().getType()) {
            case "publication":
            case "dataset":
            case "otherresearchproduct":
            case "software":
                mapTitle(j, re);
                re.setDateofacceptance(j.read("$.dateofacceptance.value"));
                re.setPublisher(j.read("$.publisher.value"));

                JSONArray pids = j.read("$.pid");
                re.setPid(pids.stream()
                        .map(p -> asStructuredProperty((LinkedHashMap<String, Object>) p))
                        .collect(Collectors.toList()));

                re.setResulttype(asQualifier(j.read("$.resulttype")));

                JSONArray collfrom = j.read("$.collectedfrom");
                re.setCollectedfrom(collfrom.stream()
                        .map(c -> asKV((LinkedHashMap<String, Object>) c))
                        .collect(Collectors.toList()));

                //TODO still to be mapped
                //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl"));

                break;
            case "datasource":
                re.setOfficialname(j.read("$.officialname.value"));
                re.setWebsiteurl(j.read("$.websiteurl.value"));
                re.setDatasourcetype(asQualifier(j.read("$.datasourcetype")));
                re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility")));
                break;
            case "organization":
                // TODO organization fields (legalname, legalshortname, country) not mapped yet
                break;
            case "project":
                // NOTE(review): reuses the result title mapping ($.title) — confirm
                // the project JSON actually exposes its title under that path.
                mapTitle(j, re);
                break;
        }

        // Wrap the pruned model in a fresh TypedRow, preserving identity/metadata
        // but replacing the heavy OAF payload with the compact serialized form.
        return new EntityRelEntity().setSource(
                new TypedRow()
                        .setSourceId(e.getSource().getSourceId())
                        .setDeleted(e.getSource().getDeleted())
                        .setType(e.getSource().getType())
                        .setOaf(MAPPER.writeValueAsString(re)));
    }

    /** Converts a JSON object parsed as a map into a {@link KeyValue} (key/value pair). */
    private KeyValue asKV(LinkedHashMap<String, Object> j) {
        final KeyValue kv = new KeyValue();
        kv.setKey((String) j.get("key"));
        kv.setValue((String) j.get("value"));
        return kv;
    }

    /** Maps the first element of the $.title array, if any, onto the related entity. */
    private void mapTitle(DocumentContext j, RelatedEntity re) {
        JSONArray a = j.read("$.title");
        if (!a.isEmpty()) {
            re.setTitle(asStructuredProperty((LinkedHashMap<String, Object>) a.get(0)));
        }
    }

    /** Converts a JSON object parsed as a map into a {@link StructuredProperty} (value + qualifier). */
    private StructuredProperty asStructuredProperty(LinkedHashMap<String, Object> j) {
        final StructuredProperty sp = new StructuredProperty();
        sp.setValue((String) j.get("value"));
        sp.setQualifier(asQualifier((LinkedHashMap<String, String>) j.get("qualifier")));
        return sp;
    }

    /** Converts a JSON object parsed as a map into a {@link Qualifier} (classid/classname/schemeid/schemename). */
    public Qualifier asQualifier(LinkedHashMap<String, String> j) {
        Qualifier q = new Qualifier();
        q.setClassid(j.get("classid"));
        q.setClassname(j.get("classname"));
        q.setSchemeid(j.get("schemeid"));
        q.setSchemename(j.get("schemename"));
        return q;
    }
}

View File

@ -0,0 +1,210 @@
package eu.dnetlib.dhp.graph;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
import java.io.Serializable;
import java.util.List;
/**
 * Compact projection of a graph entity, carrying only the fields needed when the
 * entity appears as the *related* side of a join. Which subset of fields is
 * populated depends on the entity type (result, datasource, organization, project);
 * the rest remain null. Instances are built via the fluent setters.
 */
public class RelatedEntity implements Serializable {

    // Explicit serialVersionUID: without it the JVM derives one from the class
    // shape, so any future field addition would break serialized-form compatibility.
    private static final long serialVersionUID = 1L;

    // --- result fields (publication, dataset, otherresearchproduct, software) ---
    private StructuredProperty title; // also populated for projects
    private String dateofacceptance;
    private String publisher;
    private List<StructuredProperty> pid;
    private String codeRepositoryUrl; // TODO not mapped yet (see MappingUtils)
    private Qualifier resulttype;
    private List<KeyValue> collectedfrom;

    // --- datasource fields ---
    private String officialname;
    private String websiteurl; // also for organizations, projects
    private Qualifier datasourcetype;
    private Qualifier datasourcetypeui;
    //private String aggregatortype;
    private Qualifier openairecompatibility;

    // --- organization fields ---
    private String legalname;
    private String legalshortname;
    private Qualifier country;

    // --- project fields ---
    private String code;
    private String acronym;
    private Qualifier contracttype;
    private String fundingtree;

    public StructuredProperty getTitle() {
        return title;
    }

    public RelatedEntity setTitle(StructuredProperty title) {
        this.title = title;
        return this;
    }

    public String getDateofacceptance() {
        return dateofacceptance;
    }

    public RelatedEntity setDateofacceptance(String dateofacceptance) {
        this.dateofacceptance = dateofacceptance;
        return this;
    }

    public String getPublisher() {
        return publisher;
    }

    public RelatedEntity setPublisher(String publisher) {
        this.publisher = publisher;
        return this;
    }

    public List<StructuredProperty> getPid() {
        return pid;
    }

    public RelatedEntity setPid(List<StructuredProperty> pid) {
        this.pid = pid;
        return this;
    }

    public String getCodeRepositoryUrl() {
        return codeRepositoryUrl;
    }

    public RelatedEntity setCodeRepositoryUrl(String codeRepositoryUrl) {
        this.codeRepositoryUrl = codeRepositoryUrl;
        return this;
    }

    public Qualifier getResulttype() {
        return resulttype;
    }

    public RelatedEntity setResulttype(Qualifier resulttype) {
        this.resulttype = resulttype;
        return this;
    }

    public List<KeyValue> getCollectedfrom() {
        return collectedfrom;
    }

    public RelatedEntity setCollectedfrom(List<KeyValue> collectedfrom) {
        this.collectedfrom = collectedfrom;
        return this;
    }

    public String getOfficialname() {
        return officialname;
    }

    public RelatedEntity setOfficialname(String officialname) {
        this.officialname = officialname;
        return this;
    }

    public String getWebsiteurl() {
        return websiteurl;
    }

    public RelatedEntity setWebsiteurl(String websiteurl) {
        this.websiteurl = websiteurl;
        return this;
    }

    public Qualifier getDatasourcetype() {
        return datasourcetype;
    }

    public RelatedEntity setDatasourcetype(Qualifier datasourcetype) {
        this.datasourcetype = datasourcetype;
        return this;
    }

    public Qualifier getDatasourcetypeui() {
        return datasourcetypeui;
    }

    public RelatedEntity setDatasourcetypeui(Qualifier datasourcetypeui) {
        this.datasourcetypeui = datasourcetypeui;
        return this;
    }

    public Qualifier getOpenairecompatibility() {
        return openairecompatibility;
    }

    public RelatedEntity setOpenairecompatibility(Qualifier openairecompatibility) {
        this.openairecompatibility = openairecompatibility;
        return this;
    }

    public String getLegalname() {
        return legalname;
    }

    public RelatedEntity setLegalname(String legalname) {
        this.legalname = legalname;
        return this;
    }

    public String getLegalshortname() {
        return legalshortname;
    }

    public RelatedEntity setLegalshortname(String legalshortname) {
        this.legalshortname = legalshortname;
        return this;
    }

    public Qualifier getCountry() {
        return country;
    }

    public RelatedEntity setCountry(Qualifier country) {
        this.country = country;
        return this;
    }

    public String getCode() {
        return code;
    }

    public RelatedEntity setCode(String code) {
        this.code = code;
        return this;
    }

    public String getAcronym() {
        return acronym;
    }

    public RelatedEntity setAcronym(String acronym) {
        this.acronym = acronym;
        return this;
    }

    public Qualifier getContracttype() {
        return contracttype;
    }

    public RelatedEntity setContracttype(Qualifier contracttype) {
        this.contracttype = contracttype;
        return this;
    }

    public String getFundingtree() {
        return fundingtree;
    }

    public RelatedEntity setFundingtree(String fundingtree) {
        this.fundingtree = fundingtree;
        return this;
    }
}

View File

@ -1,8 +1,5 @@
package eu.dnetlib.dhp.graph; package eu.dnetlib.dhp.graph;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.Relation;
import java.io.Serializable; import java.io.Serializable;
public class Tuple implements Serializable { public class Tuple implements Serializable {
@ -11,6 +8,7 @@ public class Tuple implements Serializable {
private TypedRow target; private TypedRow target;
public TypedRow getRelation() { public TypedRow getRelation() {
return relation; return relation;
} }

View File

@ -4,40 +4,40 @@ import java.io.Serializable;
public class TypedRow implements Serializable { public class TypedRow implements Serializable {
private String source; private String sourceId;
private String target;
private String targetId;
private Boolean deleted;
private String type; private String type;
private String oaf; private String oaf;
public TypedRow() { public String getSourceId() {
return sourceId;
} }
public TypedRow(String source, String type, String oaf) { public TypedRow setSourceId(String sourceId) {
this.source = source; this.sourceId = sourceId;
this.type = type;
this.oaf = oaf;
}
public TypedRow(String source, String target, String type, String oaf) {
this(source, type, oaf);
this.target = target;
}
public String getSource() {
return source;
}
public TypedRow setSource(String source) {
this.source = source;
return this; return this;
} }
public String getTarget() { public String getTargetId() {
return target; return targetId;
} }
public TypedRow setTarget(String target) { public TypedRow setTargetId(String targetId) {
this.target = target; this.targetId = targetId;
return this;
}
public Boolean getDeleted() {
return deleted;
}
public TypedRow setDeleted(Boolean deleted) {
this.deleted = deleted;
return this; return this;
} }
@ -58,5 +58,4 @@ public class TypedRow implements Serializable {
this.oaf = oaf; this.oaf = oaf;
return this; return this;
} }
} }

View File

@ -0,0 +1,42 @@
package eu.dnetlib.dhp.graph;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStreamReader;
public class MappingUtilsTest {
private MappingUtils utils;
@Before
public void setUp() {
utils = new MappingUtils();
}
@Test
public void testOafMappingDatasource() throws IOException {
final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("datasource.json"));
final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
e.getSource().setType("datasource");
final EntityRelEntity out = utils.pruneModel(e);
System.out.println(out);
}
@Test
public void testOafMappinResult() throws IOException {
final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json"));
final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
e.getSource().setType("otherresearchproduct");
final EntityRelEntity out = utils.pruneModel(e);
System.out.println(out);
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long