[SKG-IF] fixing issue in deserialization
commit b176bbef1d
parent e8f19ad003
@@ -4,8 +4,10 @@ package eu.dnetlib.dhp.oa.graph.dump.filterentities;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.Optional;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
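Note: the new imports serve the hunk at -165 below, where the per-instance key extraction switches from map to flatMap over Stream.of so that each instance can contribute two keys instead of one. A stand-alone toy sketch of that switch (none of these names are project code):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class FlatMapDemo {
	public static void main(String[] args) {
		// each two-element array stands in for an Instance: { hostedby key, collectedfrom key }
		List<String[]> instances = Arrays.asList(
			new String[] { "ds1", "ds2" },
			new String[] { "ds1", "ds3" });
		List<String> keys = instances
			.stream()
			// flatMap lets one element emit two values; map would emit exactly one
			.flatMap(i -> Stream.of(i[0], i[1]))
			.collect(Collectors.toList());
		System.out.println(keys); // [ds1, ds2, ds1, ds3] — duplicates are dropped later with distinct()
	}
}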
@@ -105,7 +107,7 @@ public class SelectConnectedEntities implements Serializable {
 			.joinWith(relation, resultIds.col("id").equalTo(relation.col("source")))
 			.map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class));
 
-		// write relations having sorce and target in the set
+		// write relations having source and target in the set
 		resultIds
 			.joinWith(resultSource, resultIds.col("id").equalTo(resultSource.col("target")))
 			.map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class))
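Note: beyond the comment typo fix (sorce → source), this hunk shows the selection pattern the comment documents. A minimal sketch of the two-step semi-join, assuming the project's Identifiers and Relation beans; the helper class and method name are ours, not the file's actual structure:

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import scala.Tuple2;

public class RelationFilter {
	public static Dataset<Relation> bothEndsInSet(Dataset<Identifiers> ids, Dataset<Relation> rels) {
		// first pass: keep relations whose source is a selected id
		Dataset<Relation> sourceInSet = ids
			.joinWith(rels, ids.col("id").equalTo(rels.col("source")))
			.map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t -> t._2(), Encoders.bean(Relation.class));
		// second pass: of those, keep relations whose target is also selected
		return ids
			.joinWith(sourceInSet, ids.col("id").equalTo(sourceInSet.col("target")))
			.map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t -> t._2(), Encoders.bean(Relation.class));
	}
}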
@@ -153,11 +155,11 @@ public class SelectConnectedEntities implements Serializable {
 			.json(workingDir + "project");
 
 		// read the results and select all the distinct instance.hostedbykey
-		Dataset<String> hostedbyIds = spark.emptyDataset(Encoders.STRING());
+		Dataset<String> datasourceReferencedIds = spark.emptyDataset(Encoders.STRING());
 		for (EntityType entity : ModelSupport.entityTypes.keySet())
 			if (ModelSupport.isResult(entity)) {
 				Class<R> resultClazz = ModelSupport.entityTypes.get(entity);
-				hostedbyIds = hostedbyIds
+				datasourceReferencedIds = datasourceReferencedIds
 					.union(
 						Utils
 							.readPath(spark, workingDir + entity.name(), resultClazz)
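Note: the rename from hostedbyIds to datasourceReferencedIds tracks the content change in the next hunk: the accumulator now receives collectedfrom keys as well, not only hostedby ones. A stand-alone sketch of the accumulate-by-union pattern used here, with plain strings in place of per-entity-type reads:

import java.util.Arrays;
import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class UnionAccumulator {
	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("union-demo").getOrCreate();
		// one batch of datasource ids per result entity type (illustrative values)
		List<List<String>> perEntityIds = Arrays.asList(
			Arrays.asList("ds1", "ds2"),
			Arrays.asList("ds2", "ds3"));
		// start empty, union in every batch, deduplicate at the end
		Dataset<String> acc = spark.emptyDataset(Encoders.STRING());
		for (List<String> batch : perEntityIds)
			acc = acc.union(spark.createDataset(batch, Encoders.STRING()));
		acc.distinct().show(); // ds1, ds2, ds3
		spark.stop();
	}
}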
@@ -165,14 +167,15 @@ public class SelectConnectedEntities implements Serializable {
 							(FlatMapFunction<R, String>) r -> r
 								.getInstance()
 								.stream()
-								.map(i -> i.getHostedby().getKey())
+								.flatMap(i -> Stream.of(i.getHostedby().getKey(), i.getCollectedfrom().getKey()))
 								.collect(Collectors.toList())
 								.iterator(),
 							Encoders.STRING()));
 			}
+		datasourceReferencedIds = datasourceReferencedIds.distinct();
 		// join with the datasources and write the datasource in the join
-		hostedbyIds
-			.joinWith(datasources, hostedbyIds.col("value").equalTo(datasources.col("id")))
+		datasourceReferencedIds
+			.joinWith(datasources, datasourceReferencedIds.col("value").equalTo(datasources.col("id")))
 			.map((MapFunction<Tuple2<String, Datasource>, Datasource>) t2 -> t2._2(), Encoders.bean(Datasource.class))
 			.write()
 			.mode(SaveMode.Overwrite)
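Note: once every instance contributes two keys, the same datasource id can occur many times across results, and the added distinct() keeps the join from emitting duplicate Datasource records. In the join condition, "value" is the implicit column name Spark assigns to a Dataset<String>. A sketch of that join, assuming the project's Datasource bean:

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import scala.Tuple2;

public class DatasourceJoin {
	public static Dataset<Datasource> select(Dataset<String> referencedIds, Dataset<Datasource> datasources) {
		// one row per referenced datasource id, so each Datasource is emitted once
		Dataset<String> unique = referencedIds.distinct();
		return unique
			// "value" is the single implicit column of a Dataset<String>
			.joinWith(datasources, unique.col("value").equalTo(datasources.col("id")))
			.map((MapFunction<Tuple2<String, Datasource>, Datasource>) t -> t._2(), Encoders.bean(Datasource.class));
	}
}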
@@ -196,20 +199,15 @@ public class SelectConnectedEntities implements Serializable {
 		// selecting relations between datasources and organizations in the selected set
 		Dataset<Datasource> datasourceSbs = Utils.readPath(spark, workingDir + "datasource", Datasource.class);
 		Dataset<Relation> dsSourceRels = datasourceSbs
-			.joinWith(relation, datasourceSbs.col("id").equalTo(relation.col("source")))
+			.joinWith(relation, datasourceSbs.col("id").as("dsId").equalTo(relation.col("source")))
 			.map((MapFunction<Tuple2<Datasource, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class));
 		dsSourceRels
-			.joinWith(organizationSbs, dsSourceRels.col("target").equalTo(organizations.col("id")))
+			.joinWith(organizationSbs, dsSourceRels.col("target").equalTo(organizations.col("id").as("orgId")))
 			.map((MapFunction<Tuple2<Relation, Organization>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class))
 			.write()
 			.mode(SaveMode.Append)
 			.option("compression", "gzip")
 			.json(workingDir + "relation");
 
-		/**
-		 * DATASOURCE_PROVIDED_BY_ORGANIZATION(
-		 * "isProvidedBy"),
-		 */
-
 	}
}
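Note: both sides of these joins carry an id-like column, and the added .as("dsId") / .as("orgId") aliases read as an attempt to disambiguate column resolution; the commit does not explain them, so that is our reading. The hunk also drops a leftover commented-out reference to the DATASOURCE_PROVIDED_BY_ORGANIZATION ("isProvidedBy") relation label. A common alternative for the same ambiguity is a dataset-level alias with qualified columns; an illustrative sketch with untyped frames:

import static org.apache.spark.sql.functions.col;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class AliasJoin {
	// untyped frames purely to keep the sketch short; names are illustrative
	public static Dataset<Row> datasourceRelations(Dataset<Row> datasources, Dataset<Row> relations) {
		return datasources.as("ds")
			// qualified column names make the intended side explicit to the analyzer
			.join(relations.as("rel"), col("ds.id").equalTo(col("rel.source")));
	}
}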