forked from D-Net/dnet-hadoop

code formatting

This commit is contained in:
parent 7ad573d023
commit 45057440c1
@@ -117,12 +117,13 @@ public class SparkPropagateRelation extends AbstractSparkAction {
 
 	private static Dataset<Relation> createNewRels(
 		Dataset<Relation> rels,
-		Dataset<Tuple2<String,String>> mergedIds,
+		Dataset<Tuple2<String, String>> mergedIds,
 		MapFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>, Relation> mapRel) {
 
 		Dataset<Tuple3<String, Relation, String>> mapped = rels
 			.map(
-				(MapFunction<Relation, Tuple3<String, Relation, String>>) r -> new Tuple3<>(getId(r, FieldType.SOURCE), r, getId(r, FieldType.TARGET)),
+				(MapFunction<Relation, Tuple3<String, Relation, String>>) r -> new Tuple3<>(getId(r, FieldType.SOURCE),
+					r, getId(r, FieldType.TARGET)),
 				Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class), Encoders.STRING()));
 
 		Dataset<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>> relSource = mapped
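Note for readers outside the project (not part of the commit): the hunk above only re-wraps a long lambda, but the idiom it touches — mapping each Relation into a (sourceId, Relation, targetId) triple with mixed encoders — is easier to see in isolation. The sketch below is a minimal, hedged reconstruction of that idiom only; Payload is a hypothetical stand-in for Relation, and plain field access replaces the project's getId(r, FieldType.…) helper.

	import org.apache.spark.api.java.function.MapFunction;
	import org.apache.spark.sql.Dataset;
	import org.apache.spark.sql.Encoders;

	import scala.Tuple3;

	public class TripleEncoderSketch {

		// hypothetical payload class standing in for Relation (Kryo only needs a no-arg constructor)
		public static class Payload implements java.io.Serializable {
			public String source;
			public String target;
		}

		public static Dataset<Tuple3<String, Payload, String>> toTriples(Dataset<Payload> rels) {
			return rels
				.map(
					// keep the two ids as plain string slots ("_1", "_3") and ship the whole
					// payload Kryo-serialised in the middle slot ("_2")
					(MapFunction<Payload, Tuple3<String, Payload, String>>) p -> new Tuple3<>(p.source, p, p.target),
					Encoders.tuple(Encoders.STRING(), Encoders.kryo(Payload.class), Encoders.STRING()));
		}
	}

Keeping the ids as plain string columns is what lets a later join condition address them positionally, as the original code does with relSource.col("_1._3").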
@@ -130,7 +131,9 @@ public class SparkPropagateRelation extends AbstractSparkAction {
 
 		return relSource
 			.joinWith(mergedIds, relSource.col("_1._3").equalTo(mergedIds.col("_2")), "left_outer")
-			.filter((FilterFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>>) r -> r._2() != null || r._1() != null)
+			.filter(
+				(FilterFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>>) r -> r
+					._2() != null || r._1() != null)
 			.map(mapRel, Encoders.bean(Relation.class))
 			.distinct();
 	}
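Also not part of the commit: the second hunk only re-wraps the filter lambda inside a joinWith(…, "left_outer") / map / distinct pipeline. The sketch below illustrates that pipeline shape with a hypothetical DemoRel bean and an assumed mapper body; the real rewrite is performed by the caller-supplied mapRel, which this diff does not show, so treat the mapper here as an illustration rather than the project's logic.

	import org.apache.spark.api.java.function.MapFunction;
	import org.apache.spark.sql.Dataset;
	import org.apache.spark.sql.Encoders;

	import scala.Tuple2;

	public class JoinWithSketch {

		// hypothetical bean standing in for Relation; Encoders.bean needs getters/setters
		public static class DemoRel implements java.io.Serializable {
			private String source;
			private String target;

			public String getSource() { return source; }
			public void setSource(String source) { this.source = source; }
			public String getTarget() { return target; }
			public void setTarget(String target) { this.target = target; }
		}

		// mergedIds is assumed to hold (replacementId, oldId) pairs, i.e. "_1" = new id, "_2" = old id
		public static Dataset<DemoRel> redirectTargets(Dataset<DemoRel> rels, Dataset<Tuple2<String, String>> mergedIds) {
			return rels
				// left_outer keeps every relation; the right side of the pair is null
				// whenever the relation's target id is not among the merged ids
				.joinWith(mergedIds, rels.col("target").equalTo(mergedIds.col("_2")), "left_outer")
				.map(
					(MapFunction<Tuple2<DemoRel, Tuple2<String, String>>, DemoRel>) t -> {
						DemoRel r = t._1();
						if (t._2() != null) {
							r.setTarget(t._2()._1()); // assumed rewrite: point the relation at the replacement id
						}
						return r;
					},
					Encoders.bean(DemoRel.class))
				.distinct();
		}
	}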