[SKG-IF] fixing issue in deserialization
commit b176bbef1d
parent e8f19ad003
@@ -4,8 +4,10 @@ package eu.dnetlib.dhp.oa.graph.dump.filterentities;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.Optional;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
@@ -105,7 +107,7 @@ public class SelectConnectedEntities implements Serializable {
             .joinWith(relation, resultIds.col("id").equalTo(relation.col("source")))
             .map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class));
 
-        // write relations having sorce and target in the set
+        // write relations having source and target in the set
         resultIds
             .joinWith(resultSource, resultIds.col("id").equalTo(resultSource.col("target")))
             .map((MapFunction<Tuple2<Identifiers, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class))
@@ -153,11 +155,11 @@ public class SelectConnectedEntities implements Serializable {
             .json(workingDir + "project");
 
         // read the results and select all the distinct instance.hostedbykey
-        Dataset<String> hostedbyIds = spark.emptyDataset(Encoders.STRING());
+        Dataset<String> datasourceReferencedIds = spark.emptyDataset(Encoders.STRING());
         for (EntityType entity : ModelSupport.entityTypes.keySet())
             if (ModelSupport.isResult(entity)) {
                 Class<R> resultClazz = ModelSupport.entityTypes.get(entity);
-                hostedbyIds = hostedbyIds
+                datasourceReferencedIds = datasourceReferencedIds
                     .union(
                         Utils
                             .readPath(spark, workingDir + entity.name(), resultClazz)
@@ -165,14 +167,15 @@ public class SelectConnectedEntities implements Serializable {
                                 (FlatMapFunction<R, String>) r -> r
                                     .getInstance()
                                     .stream()
-                                    .map(i -> i.getHostedby().getKey())
+                                    .flatMap(i -> Stream.of(i.getHostedby().getKey(), i.getCollectedfrom().getKey()))
                                     .collect(Collectors.toList())
                                     .iterator(),
                                 Encoders.STRING()));
             }
+        datasourceReferencedIds = datasourceReferencedIds.distinct();
         // join with the datasources and write the datasource in the join
-        hostedbyIds
-            .joinWith(datasources, hostedbyIds.col("value").equalTo(datasources.col("id")))
+        datasourceReferencedIds
+            .joinWith(datasources, datasourceReferencedIds.col("value").equalTo(datasources.col("id")))
             .map((MapFunction<Tuple2<String, Datasource>, Datasource>) t2 -> t2._2(), Encoders.bean(Datasource.class))
             .write()
             .mode(SaveMode.Overwrite)
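Note: the hunk above is the core of the deserialization fix. Instead of collecting only each instance's hostedby key, the loop now gathers both the hostedby and the collectedfrom keys, and the accumulated dataset is deduplicated with distinct() before joining with the datasources; without that, duplicate ids from the union over all result types would produce duplicate Datasource rows in the inner join. A minimal, Spark-free sketch of the stream change, using hypothetical record stand-ins for the Instance/KeyValue beans (Java 16+):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class InstanceKeysSketch {

    // Hypothetical stand-ins for the KeyValue/Instance beans referenced in the diff.
    record KeyValue(String key) {
    }

    record Instance(KeyValue hostedby, KeyValue collectedfrom) {
    }

    public static void main(String[] args) {
        List<Instance> instances = List
            .of(
                new Instance(new KeyValue("ds1"), new KeyValue("ds2")),
                new Instance(new KeyValue("ds1"), new KeyValue("ds3")));

        // Before: one key per instance, hostedby only -> [ds1, ds1]
        List<String> before = instances
            .stream()
            .map(i -> i.hostedby().key())
            .collect(Collectors.toList());

        // After: two keys per instance via flatMap; distinct() mirrors the
        // dataset-level deduplication added in the commit -> [ds1, ds2, ds3]
        List<String> after = instances
            .stream()
            .flatMap(i -> Stream.of(i.hostedby().key(), i.collectedfrom().key()))
            .distinct()
            .collect(Collectors.toList());

        System.out.println(before);
        System.out.println(after);
    }
}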
@@ -196,20 +199,15 @@ public class SelectConnectedEntities implements Serializable {
         // selecting relations between datasources and organizations in the selected set
         Dataset<Datasource> datasourceSbs = Utils.readPath(spark, workingDir + "datasource", Datasource.class);
         Dataset<Relation> dsSourceRels = datasourceSbs
-            .joinWith(relation, datasourceSbs.col("id").equalTo(relation.col("source")))
+            .joinWith(relation, datasourceSbs.col("id").as("dsId").equalTo(relation.col("source")))
             .map((MapFunction<Tuple2<Datasource, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class));
         dsSourceRels
-            .joinWith(organizationSbs, dsSourceRels.col("target").equalTo(organizations.col("id")))
+            .joinWith(organizationSbs, dsSourceRels.col("target").equalTo(organizations.col("id").as("orgId")))
             .map((MapFunction<Tuple2<Relation, Organization>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class))
             .write()
             .mode(SaveMode.Append)
             .option("compression", "gzip")
             .json(workingDir + "relation");
 
-        /**
-         * DATASOURCE_PROVIDED_BY_ORGANIZATION(
-         * "isProvidedBy"),
-         */
-
     }
 }
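Throughout the class the same select-by-join idiom recurs: accumulate a single-column dataset of ids, then inner joinWith the entity dataset and map each pair down to the entity side, effectively a semi-join. A small self-contained sketch of the idiom under stated assumptions (local Spark session, hypothetical Ds bean; a Dataset<String> exposes its only column as "value", which is why the commit joins on col("value")):

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;

public class SemiJoinSketch {

    // Hypothetical minimal bean standing in for the project's Datasource model class.
    public static class Ds implements Serializable {
        private String id;
        private String name;

        public Ds() {
        }

        public Ds(String id, String name) {
            this.id = id;
            this.name = name;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession
            .builder()
            .master("local[*]")
            .appName("semi-join-sketch")
            .getOrCreate();

        // Ids referenced by the results; distinct() mirrors the commit's deduplication.
        Dataset<String> ids = spark
            .createDataset(Arrays.asList("ds1", "ds2", "ds1"), Encoders.STRING())
            .distinct();

        Dataset<Ds> datasources = spark
            .createDataset(
                Arrays.asList(new Ds("ds1", "A"), new Ds("ds3", "C")),
                Encoders.bean(Ds.class));

        // Inner joinWith keeps only matching pairs; mapping to t2._2() projects the
        // entity side, i.e. a semi-join: datasources whose id is actually referenced.
        Dataset<Ds> selected = ids
            .joinWith(datasources, ids.col("value").equalTo(datasources.col("id")))
            .map((MapFunction<Tuple2<String, Ds>, Ds>) t2 -> t2._2(), Encoders.bean(Ds.class));

        selected.show(); // only ds1 survives

        spark.stop();
    }
}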