forked from D-Net/dnet-hadoop

Changed the way to find results linked to projects: we now verify that the project actually exists in the graph before selecting the result.

parent 52ce35d57b
commit 320cf02d96
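In practical terms the selection moves from a two-way to a three-way join: previously any relation with relClass isProducedBy selected its source result even when the target project was absent from the graph, whereas now the relation target must also match a project record. The following is a minimal sketch of the query the job now runs (class name is hypothetical; the temp views `result`, `relation`, and `project` are assumed to be registered in a part of the diff not shown in the hunks below):

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

class SelectionSketch { // hypothetical wrapper, not part of the commit

	// Assumes temp views "result", "relation" (pre-filtered to isProducedBy
	// relations) and "project" have been registered. The inner join on the
	// project id is the existence check the commit message describes.
	static Dataset<Row> selectLinked(SparkSession spark) {
		return spark.sql(
			"SELECT res.* " +
				"FROM relation rel " +
				"JOIN result res ON rel.source = res.id " +
				"JOIN project p ON rel.target = p.id");
	}
}
```

The loaded hunks of SparkResultLinkedToProject.java follow.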
```diff
@@ -24,7 +24,8 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import scala.Tuple2;
+import org.apache.spark.sql.*;
+import eu.dnetlib.dhp.schema.oaf.Project;
 /**
  * Selects the results linked to projects. The dump will be performed only for these results.
  * The code to perform the dump and to expand the dumped results with the information related to projects
```
```diff
@@ -59,8 +60,8 @@ public class SparkResultLinkedToProject implements Serializable {
         final String resultClassName = parser.get("resultTableName");
         log.info("resultTableName: {}", resultClassName);
 
-        final String relationPath = parser.get("relationPath");
-        log.info("relationPath: {}", relationPath);
+        final String graphPath = parser.get("graphPath");
+        log.info("graphPath: {}", graphPath);
 
         Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);
         SparkConf conf = new SparkConf();
```
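The single graphPath argument replaces the old relationPath, so all graph tables are resolved against one root: the hunk below shows the relation read at graphPath + "/relation". The corresponding project read and the temp-view registration that the SQL relies on are not visible in the loaded hunks; a plausible sketch, where the "/project" subpath and the view names are assumptions by analogy:

```java
// Assumed counterpart of the relation read shown in the diff, placed inside
// writeResultsLinkedToProjects; Utils is the dump module's read helper.
Dataset<Project> projects = Utils.readPath(spark, graphPath + "/project", Project.class);

// The SQL query references these table names, so the datasets would need to
// be exposed as temp views before spark.sql(...) is called.
results.createOrReplaceTempView("result");
relations.createOrReplaceTempView("relation");
projects.createOrReplaceTempView("project");
```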
```diff
@@ -70,34 +71,32 @@ public class SparkResultLinkedToProject implements Serializable {
             isSparkSessionManaged,
             spark -> {
                 Utils.removeOutputDir(spark, outputPath);
-                writeResultsLinkedToProjects(spark, inputClazz, inputPath, outputPath, relationPath);
+                writeResultsLinkedToProjects(spark, inputClazz, inputPath, outputPath, graphPath);
             });
     }
 
     private static <R extends Result> void writeResultsLinkedToProjects(SparkSession spark, Class<R> inputClazz,
-        String inputPath, String outputPath, String relationPath) {
+        String inputPath, String outputPath, String graphPath) {
 
         Dataset<R> results = Utils
             .readPath(spark, inputPath, inputClazz)
             .filter("dataInfo.deletedbyinference = false and datainfo.invisible = false");
         Dataset<Relation> relations = Utils
-            .readPath(spark, relationPath, Relation.class)
+            .readPath(spark, graphPath + "/relation", Relation.class)
             .filter(
                 "dataInfo.deletedbyinference = false and lower(relClass) = '"
                     + ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
 
-        relations
-            .joinWith(
-                results, relations.col("source").equalTo(results.col("id")),
-                "inner")
-            .groupByKey(
-                (MapFunction<Tuple2<Relation, R>, String>) value -> value
-                    ._2()
-                    .getId(),
-                Encoders.STRING())
-            .mapGroups((MapGroupsFunction<String, Tuple2<Relation, R>, R>) (k, it) -> {
-                return it.next()._2();
-            }, Encoders.bean(inputClazz))
+        spark
+            .sql(
+                "Select res.* " +
+                    "from relation rel " +
+                    "join result res " +
+                    "on rel.source = res.id " +
+                    "join project p " +
+                    "on rel.target = p.id " +
+                    "")
+            .as(Encoders.bean(inputClazz))
             .write()
             .mode(SaveMode.Overwrite)
             .option("compression", "gzip")
```
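One behavioural difference is worth noting: the removed joinWith/groupByKey pipeline emitted each result at most once, even when it was linked to several projects, while a plain inner join produces one row per matching relation. If the old semantics are required, a deduplication step on the new path would restore them; a hypothetical variant, inside writeResultsLinkedToProjects:

```java
// Hypothetical follow-up, not part of this commit: collapse duplicate rows so
// that a result funded by several projects is dumped once, as the previous
// groupByKey/mapGroups implementation guaranteed.
Dataset<R> unique = spark
	.sql(
		"SELECT res.* FROM relation rel " +
			"JOIN result res ON rel.source = res.id " +
			"JOIN project p ON rel.target = p.id")
	.as(Encoders.bean(inputClazz))
	.dropDuplicates("id"); // keep one record per result id
```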