
added FilterFunction specification in filter operation

This commit is contained in:
Miriam Baglioni 2020-11-25 13:42:31 +01:00
parent bde6d337dd
commit 21ce175d17
1 changed file with 2 additions and 1 deletion


@@ -9,6 +9,7 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
@@ -54,7 +55,7 @@ public class CommunitySplit implements Serializable {
 	private static void printResult(String c, Dataset<CommunityResult> result, String outputPath) {
 		Dataset<CommunityResult> community_products = result
-			.filter(r -> containsCommunity(r, c));
+			.filter((FilterFunction<CommunityResult>) r -> containsCommunity(r, c));
 		try {
 			community_products.first();
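
For context: the explicit cast matters because the Java-facing Dataset.filter is overloaded (among others, it accepts a FilterFunction<T> and a Scala function), so a bare lambda can be ambiguous to the Java compiler; casting to (FilterFunction<CommunityResult>) pins down the target type. Below is a minimal, self-contained sketch of the same pattern; the FilterFunctionSketch and Record names are illustrative only and not part of this repository.

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

// Sketch: casting the lambda to FilterFunction<T> gives Dataset.filter an
// explicit target type, mirroring the change in this commit.
public class FilterFunctionSketch {

	public static class Record implements Serializable {
		private String community;

		public Record() {
		}

		public Record(String community) {
			this.community = community;
		}

		public String getCommunity() {
			return community;
		}

		public void setCommunity(String community) {
			this.community = community;
		}
	}

	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("filter-function-sketch")
			.master("local[*]")
			.getOrCreate();

		Dataset<Record> records = spark
			.createDataset(
				Arrays.asList(new Record("egi"), new Record("dh-ch")),
				Encoders.bean(Record.class));

		// Explicit FilterFunction cast, as in the diff above.
		Dataset<Record> selected = records
			.filter((FilterFunction<Record>) r -> "egi".equals(r.getCommunity()));

		selected.show(false);
		spark.stop();
	}
}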