From 1763d377ad4ccdbc4dce827f2474cfc3f626024c Mon Sep 17 00:00:00 2001
From: Claudio Atzori
Date: Thu, 23 Nov 2023 16:33:24 +0100
Subject: [PATCH] code formatting

---
 .../eu/dnetlib/pace/util/DiffPatchMatch.java  |   1 -
 .../PrepareAffiliationRelationsTest.java      |   4 +-
 .../PrepareResultCommunitySet.java            | 152 ++++++-------
 .../ResultProjectList.java                    |  28 +--
 .../SparkResultToCommunityFromProject.java    | 206 +++++++++---------
 .../ResultToCommunityJobTest.java             | 158 +++++++-------
 6 files changed, 274 insertions(+), 275 deletions(-)

diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/util/DiffPatchMatch.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/util/DiffPatchMatch.java
index 12c96500e..cfd9acd70 100644
--- a/dhp-pace-core/src/main/java/eu/dnetlib/pace/util/DiffPatchMatch.java
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/util/DiffPatchMatch.java
@@ -18,7 +18,6 @@ package eu.dnetlib.pace.util;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 /*
  * Diff Match and Patch
  * Copyright 2018 The diff-match-patch Authors.
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/bipaffiliations/PrepareAffiliationRelationsTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/bipaffiliations/PrepareAffiliationRelationsTest.java
index e2639996c..b87738879 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/bipaffiliations/PrepareAffiliationRelationsTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/bipaffiliations/PrepareAffiliationRelationsTest.java
@@ -79,8 +79,8 @@ public class PrepareAffiliationRelationsTest {
             .getPath();
 
         String pubmedAffiliationRelationsPath = getClass()
-            .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror.json")
-            .getPath();
+            .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror.json")
+            .getPath();
 
         String outputPath = workingDir.toString() + "/actionSet";
 
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/PrepareResultCommunitySet.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/PrepareResultCommunitySet.java
index 82625a122..7bee1ea0c 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/PrepareResultCommunitySet.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/PrepareResultCommunitySet.java
@@ -31,94 +31,94 @@ import scala.Tuple2;
 
 public class PrepareResultCommunitySet {
 
-    private static final Logger log = LoggerFactory.getLogger(PrepareResultCommunitySet.class);
+    private static final Logger log = LoggerFactory.getLogger(PrepareResultCommunitySet.class);
 
-    public static void main(String[] args) throws Exception {
-        String jsonConfiguration = IOUtils
-            .toString(
-                PrepareResultCommunitySet.class
-                    .getResourceAsStream(
-                        "/eu/dnetlib/dhp/resulttocommunityfromproject/input_preparecommunitytoresult_parameters.json"));
+    public static void main(String[] args) throws Exception {
+        String jsonConfiguration = IOUtils
+            .toString(
+                PrepareResultCommunitySet.class
+                    .getResourceAsStream(
+                        "/eu/dnetlib/dhp/resulttocommunityfromproject/input_preparecommunitytoresult_parameters.json"));
 
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
-        parser.parseArgument(args);
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+        parser.parseArgument(args);
 
-        Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
-        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+        Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
+        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
-        String inputPath = parser.get("sourcePath");
-        log.info("inputPath: {}", inputPath);
+        String inputPath = parser.get("sourcePath");
+        log.info("inputPath: {}", inputPath);
 
-        final String outputPath = parser.get("outputPath");
-        log.info("outputPath: {}", outputPath);
+        final String outputPath = parser.get("outputPath");
+        log.info("outputPath: {}", outputPath);
 
-        final boolean production = Boolean.valueOf(parser.get("production"));
-        log.info("production: {}", production);
+        final boolean production = Boolean.valueOf(parser.get("production"));
+        log.info("production: {}", production);
 
-        final CommunityEntityMap projectsMap = Utils.getCommunityProjects(production);
-        // log.info("projectsMap: {}", new Gson().toJson(projectsMap));
+        final CommunityEntityMap projectsMap = Utils.getCommunityProjects(production);
+        // log.info("projectsMap: {}", new Gson().toJson(projectsMap));
 
-        SparkConf conf = new SparkConf();
+        SparkConf conf = new SparkConf();
 
-        runWithSparkSession(
-            conf,
-            isSparkSessionManaged,
-            spark -> {
-                removeOutputDir(spark, outputPath);
-                prepareInfo(spark, inputPath, outputPath, projectsMap);
-            });
-    }
+        runWithSparkSession(
+            conf,
+            isSparkSessionManaged,
+            spark -> {
+                removeOutputDir(spark, outputPath);
+                prepareInfo(spark, inputPath, outputPath, projectsMap);
+            });
+    }
 
-    private static void prepareInfo(
-        SparkSession spark,
-        String inputPath,
-        String outputPath,
-        CommunityEntityMap projectMap) {
+    private static void prepareInfo(
+        SparkSession spark,
+        String inputPath,
+        String outputPath,
+        CommunityEntityMap projectMap) {
 
-        final StructType structureSchema = new StructType()
-            .add(
-                "dataInfo", new StructType()
-                    .add("deletedbyinference", DataTypes.BooleanType)
-                    .add("invisible", DataTypes.BooleanType))
-            .add("source", DataTypes.StringType)
-            .add("target", DataTypes.StringType)
-            .add("relClass", DataTypes.StringType);
+        final StructType structureSchema = new StructType()
+            .add(
+                "dataInfo", new StructType()
+                    .add("deletedbyinference", DataTypes.BooleanType)
+                    .add("invisible", DataTypes.BooleanType))
+            .add("source", DataTypes.StringType)
+            .add("target", DataTypes.StringType)
+            .add("relClass", DataTypes.StringType);
 
-        spark
-            .read()
-            .schema(structureSchema)
-            .json(inputPath)
-            .filter(
-                "dataInfo.deletedbyinference != true " +
-                    "and relClass == '" + ModelConstants.IS_PRODUCED_BY + "'")
-            .select(
-                new Column("source").as("resultId"),
-                new Column("target").as("projectId"))
-            .groupByKey((MapFunction<Row, String>) r -> (String) r.getAs("resultId"), Encoders.STRING())
-            .mapGroups((MapGroupsFunction<String, Row, ResultProjectList>) (k, v) -> {
-                ResultProjectList rpl = new ResultProjectList();
-                rpl.setResultId(k);
-                ArrayList<String> cl = new ArrayList<>();
-                cl.addAll(projectMap.get(v.next().getAs("projectId")));
-                v.forEachRemaining(r -> {
-                    projectMap
-                        .get(r.getAs("projectId"))
-                        .forEach(c -> {
-                            if (!cl.contains(c))
-                                cl.add(c);
-                        });
+        spark
+            .read()
+            .schema(structureSchema)
+            .json(inputPath)
+            .filter(
+                "dataInfo.deletedbyinference != true " +
+                    "and relClass == '" + ModelConstants.IS_PRODUCED_BY + "'")
+            .select(
+                new Column("source").as("resultId"),
+                new Column("target").as("projectId"))
+            .groupByKey((MapFunction<Row, String>) r -> (String) r.getAs("resultId"), Encoders.STRING())
+            .mapGroups((MapGroupsFunction<String, Row, ResultProjectList>) (k, v) -> {
+                ResultProjectList rpl = new ResultProjectList();
+                rpl.setResultId(k);
+                ArrayList<String> cl = new ArrayList<>();
+                cl.addAll(projectMap.get(v.next().getAs("projectId")));
+                v.forEachRemaining(r -> {
+                    projectMap
+                        .get(r.getAs("projectId"))
+                        .forEach(c -> {
+                            if (!cl.contains(c))
+                                cl.add(c);
+                        });
 
-                });
-                if (cl.size() == 0)
-                    return null;
-                rpl.setCommunityList(cl);
-                return rpl;
-            }, Encoders.bean(ResultProjectList.class))
-            .filter(Objects::nonNull)
-            .write()
-            .mode(SaveMode.Overwrite)
-            .option("compression", "gzip")
-            .json(outputPath);
-    }
+                });
+                if (cl.size() == 0)
+                    return null;
+                rpl.setCommunityList(cl);
+                return rpl;
+            }, Encoders.bean(ResultProjectList.class))
+            .filter(Objects::nonNull)
+            .write()
+            .mode(SaveMode.Overwrite)
+            .option("compression", "gzip")
+            .json(outputPath);
+    }
 }
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultProjectList.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultProjectList.java
index ce96bbfac..44798a1f3 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultProjectList.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultProjectList.java
@@ -5,22 +5,22 @@ import java.io.Serializable;
 import java.util.ArrayList;
 
 public class ResultProjectList implements Serializable {
-    private String resultId;
-    private ArrayList<String> communityList;
+    private String resultId;
+    private ArrayList<String> communityList;
 
-    public String getResultId() {
-        return resultId;
-    }
+    public String getResultId() {
+        return resultId;
+    }
 
-    public void setResultId(String resultId) {
-        this.resultId = resultId;
-    }
+    public void setResultId(String resultId) {
+        this.resultId = resultId;
+    }
 
-    public ArrayList<String> getCommunityList() {
-        return communityList;
-    }
+    public ArrayList<String> getCommunityList() {
+        return communityList;
+    }
 
-    public void setCommunityList(ArrayList<String> communityList) {
-        this.communityList = communityList;
-    }
+    public void setCommunityList(ArrayList<String> communityList) {
+        this.communityList = communityList;
+    }
 }
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/SparkResultToCommunityFromProject.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/SparkResultToCommunityFromProject.java
index 13202d79d..6e298cf94 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/SparkResultToCommunityFromProject.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromproject/SparkResultToCommunityFromProject.java
@@ -37,127 +37,127 @@
  * @Date 11/10/23
  */
 public class SparkResultToCommunityFromProject implements Serializable {
-    private static final Logger log = LoggerFactory.getLogger(SparkResultToCommunityFromProject.class);
+    private static final Logger log = LoggerFactory.getLogger(SparkResultToCommunityFromProject.class);
 
-    public static void main(String[] args) throws Exception {
-        String jsonConfiguration = IOUtils
-            .toString(
-                SparkResultToCommunityFromProject.class
-                    .getResourceAsStream(
-                        "/eu/dnetlib/dhp/resulttocommunityfromproject/input_communitytoresult_parameters.json"));
+    public static void main(String[] args) throws Exception {
+        String jsonConfiguration = IOUtils
+            .toString(
+                SparkResultToCommunityFromProject.class
+                    .getResourceAsStream(
+                        "/eu/dnetlib/dhp/resulttocommunityfromproject/input_communitytoresult_parameters.json"));
 
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 
-        parser.parseArgument(args);
+        parser.parseArgument(args);
 
-        Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
-        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+        Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
+        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
-        String inputPath = parser.get("sourcePath");
-        log.info("inputPath: {}", inputPath);
+        String inputPath = parser.get("sourcePath");
+        log.info("inputPath: {}", inputPath);
 
-        final String outputPath = parser.get("outputPath");
-        log.info("outputPath: {}", outputPath);
+        final String outputPath = parser.get("outputPath");
+        log.info("outputPath: {}", outputPath);
 
-        final String possibleupdatespath = parser.get("preparedInfoPath");
-        log.info("preparedInfoPath: {}", possibleupdatespath);
+        final String possibleupdatespath = parser.get("preparedInfoPath");
+        log.info("preparedInfoPath: {}", possibleupdatespath);
 
-        SparkConf conf = new SparkConf();
+        SparkConf conf = new SparkConf();
 
-        runWithSparkSession(
-            conf,
-            isSparkSessionManaged,
-            spark -> {
+        runWithSparkSession(
+            conf,
+            isSparkSessionManaged,
+            spark -> {
 
-                execPropagation(spark, inputPath, outputPath, possibleupdatespath);
+                execPropagation(spark, inputPath, outputPath, possibleupdatespath);
 
-            });
-    }
+            });
+    }
 
-    private static void execPropagation(
-        SparkSession spark,
-        String inputPath,
-        String outputPath,
+    private static void execPropagation(
+        SparkSession spark,
+        String inputPath,
+        String outputPath,
 
-        String possibleUpdatesPath) {
+        String possibleUpdatesPath) {
 
-        Dataset<ResultProjectList> possibleUpdates = readPath(spark, possibleUpdatesPath, ResultProjectList.class);
+        Dataset<ResultProjectList> possibleUpdates = readPath(spark, possibleUpdatesPath, ResultProjectList.class);
 
-        ModelSupport.entityTypes
-            .keySet()
-            .parallelStream()
-            .forEach(e -> {
-                if (ModelSupport.isResult(e)) {
-                    removeOutputDir(spark, outputPath + e.name());
-                    Class<Result> resultClazz = ModelSupport.entityTypes.get(e);
-                    Dataset<Result> result = readPath(spark, inputPath + e.name(), resultClazz);
+        ModelSupport.entityTypes
+            .keySet()
+            .parallelStream()
+            .forEach(e -> {
+                if (ModelSupport.isResult(e)) {
+                    removeOutputDir(spark, outputPath + e.name());
+                    Class<Result> resultClazz = ModelSupport.entityTypes.get(e);
+                    Dataset<Result> result = readPath(spark, inputPath + e.name(), resultClazz);
 
-                    result
-                        .joinWith(
-                            possibleUpdates,
-                            result.col("id").equalTo(possibleUpdates.col("resultId")),
-                            "left_outer")
-                        .map(resultCommunityFn(), Encoders.bean(resultClazz))
-                        .write()
-                        .mode(SaveMode.Overwrite)
-                        .option("compression", "gzip")
-                        .json(outputPath + e.name());
-                }
-            });
+                    result
+                        .joinWith(
+                            possibleUpdates,
+                            result.col("id").equalTo(possibleUpdates.col("resultId")),
+                            "left_outer")
+                        .map(resultCommunityFn(), Encoders.bean(resultClazz))
+                        .write()
+                        .mode(SaveMode.Overwrite)
+                        .option("compression", "gzip")
+                        .json(outputPath + e.name());
+                }
+            });
 
-    }
+    }
 
-    private static <R extends Result> MapFunction<Tuple2<R, ResultProjectList>, R> resultCommunityFn() {
-        return value -> {
-            R ret = value._1();
-            Optional<ResultProjectList> rcl = Optional.ofNullable(value._2());
-            if (rcl.isPresent()) {
-                // ArrayList<String> communitySet = rcl.get().getCommunityList();
-                List<String> contextList = ret
-                    .getContext()
-                    .stream()
-                    .map(Context::getId)
-                    .collect(Collectors.toList());
+    private static <R extends Result> MapFunction<Tuple2<R, ResultProjectList>, R> resultCommunityFn() {
+        return value -> {
+            R ret = value._1();
+            Optional<ResultProjectList> rcl = Optional.ofNullable(value._2());
+            if (rcl.isPresent()) {
+                // ArrayList<String> communitySet = rcl.get().getCommunityList();
+                List<String> contextList = ret
+                    .getContext()
+                    .stream()
+                    .map(Context::getId)
+                    .collect(Collectors.toList());
 
-                @SuppressWarnings("unchecked")
-                R res = (R) ret.getClass().newInstance();
+                @SuppressWarnings("unchecked")
+                R res = (R) ret.getClass().newInstance();
 
-                res.setId(ret.getId());
-                List<Context> propagatedContexts = new ArrayList<>();
-                for (String cId : rcl.get().getCommunityList()) {
-                    if (!contextList.contains(cId)) {
-                        Context newContext = new Context();
-                        newContext.setId(cId);
-                        newContext
-                            .setDataInfo(
-                                Arrays
-                                    .asList(
-                                        getDataInfo(
-                                            PROPAGATION_DATA_INFO_TYPE,
-                                            PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_ID,
-                                            PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_NAME,
-                                            ModelConstants.DNET_PROVENANCE_ACTIONS)));
-                        propagatedContexts.add(newContext);
-                    } else {
-                        ret
-                            .getContext()
-                            .stream()
-                            .filter(c -> c.getId().equals(cId))
-                            .findFirst()
-                            .get()
-                            .getDataInfo()
-                            .add(
-                                getDataInfo(
-                                    PROPAGATION_DATA_INFO_TYPE,
-                                    PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_ID,
-                                    PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_NAME,
-                                    ModelConstants.DNET_PROVENANCE_ACTIONS));
-                    }
-                }
-                res.setContext(propagatedContexts);
-                ret.mergeFrom(res);
-            }
-            return ret;
-        };
-    }
+                res.setId(ret.getId());
+                List<Context> propagatedContexts = new ArrayList<>();
+                for (String cId : rcl.get().getCommunityList()) {
+                    if (!contextList.contains(cId)) {
+                        Context newContext = new Context();
+                        newContext.setId(cId);
+                        newContext
+                            .setDataInfo(
+                                Arrays
+                                    .asList(
+                                        getDataInfo(
+                                            PROPAGATION_DATA_INFO_TYPE,
+                                            PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_ID,
+                                            PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_NAME,
+                                            ModelConstants.DNET_PROVENANCE_ACTIONS)));
+                        propagatedContexts.add(newContext);
+                    } else {
+                        ret
+                            .getContext()
+                            .stream()
+                            .filter(c -> c.getId().equals(cId))
+                            .findFirst()
+                            .get()
+                            .getDataInfo()
+                            .add(
+                                getDataInfo(
+                                    PROPAGATION_DATA_INFO_TYPE,
+                                    PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_ID,
+                                    PROPAGATION_RESULT_COMMUNITY_PROJECT_CLASS_NAME,
+                                    ModelConstants.DNET_PROVENANCE_ACTIONS));
+                    }
+                }
+                res.setContext(propagatedContexts);
+                ret.mergeFrom(res);
+            }
+            return ret;
+        };
+    }
 }
diff --git a/dhp-workflows/dhp-enrichment/src/test/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultToCommunityJobTest.java b/dhp-workflows/dhp-enrichment/src/test/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultToCommunityJobTest.java
index 40b9745c5..5642501b3 100644
--- a/dhp-workflows/dhp-enrichment/src/test/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultToCommunityJobTest.java
+++ b/dhp-workflows/dhp-enrichment/src/test/java/eu/dnetlib/dhp/resulttocommunityfromproject/ResultToCommunityJobTest.java
@@ -31,103 +31,103 @@ import eu.dnetlib.dhp.schema.oaf.Dataset;
 
 public class ResultToCommunityJobTest {
 
-    private static final Logger log = LoggerFactory.getLogger(ResultToCommunityJobTest.class);
+    private static final Logger log = LoggerFactory.getLogger(ResultToCommunityJobTest.class);
 
-    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
-    private static SparkSession spark;
+    private static SparkSession spark;
 
-    private static Path workingDir;
+    private static Path workingDir;
 
-    @BeforeAll
-    public static void beforeAll() throws IOException {
-        workingDir = Files.createTempDirectory(ResultToCommunityJobTest.class.getSimpleName());
-        log.info("using work dir {}", workingDir);
+    @BeforeAll
+    public static void beforeAll() throws IOException {
+        workingDir = Files.createTempDirectory(ResultToCommunityJobTest.class.getSimpleName());
+        log.info("using work dir {}", workingDir);
 
-        SparkConf conf = new SparkConf();
-        conf.setAppName(ResultToCommunityJobTest.class.getSimpleName());
+        SparkConf conf = new SparkConf();
+        conf.setAppName(ResultToCommunityJobTest.class.getSimpleName());
 
-        conf.setMaster("local[*]");
-        conf.set("spark.driver.host", "localhost");
-        conf.set("hive.metastore.local", "true");
-        conf.set("spark.ui.enabled", "false");
-        conf.set("spark.sql.warehouse.dir", workingDir.toString());
-        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+        conf.setMaster("local[*]");
+        conf.set("spark.driver.host", "localhost");
+        conf.set("hive.metastore.local", "true");
+        conf.set("spark.ui.enabled", "false");
+        conf.set("spark.sql.warehouse.dir", workingDir.toString());
+        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
 
-        spark = SparkSession
-            .builder()
-            .appName(OrcidPropagationJobTest.class.getSimpleName())
-            .config(conf)
-            .getOrCreate();
-    }
+        spark = SparkSession
+            .builder()
+            .appName(OrcidPropagationJobTest.class.getSimpleName())
+            .config(conf)
+            .getOrCreate();
+    }
 
-    @AfterAll
-    public static void afterAll() throws IOException {
-        FileUtils.deleteDirectory(workingDir.toFile());
-        spark.stop();
-    }
+    @AfterAll
+    public static void afterAll() throws IOException {
+        FileUtils.deleteDirectory(workingDir.toFile());
+        spark.stop();
+    }
 
-    @Test
-    void testSparkResultToCommunityFromProjectJob() throws Exception {
-        final String preparedInfoPath = getClass()
-            .getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/preparedInfo")
-            .getPath();
-        SparkResultToCommunityFromProject
-            .main(
-                new String[] {
+    @Test
+    void testSparkResultToCommunityFromProjectJob() throws Exception {
+        final String preparedInfoPath = getClass()
+            .getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/preparedInfo")
+            .getPath();
+        SparkResultToCommunityFromProject
+            .main(
+                new String[] {
 
-                    "-isSparkSessionManaged", Boolean.FALSE.toString(),
-                    "-sourcePath", getClass()
-                        .getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/sample/")
-                        .getPath(),
+                    "-isSparkSessionManaged", Boolean.FALSE.toString(),
+                    "-sourcePath", getClass()
+                        .getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/sample/")
+                        .getPath(),
 
-                    "-outputPath", workingDir.toString() + "/",
-                    "-preparedInfoPath", preparedInfoPath
-                });
+                    "-outputPath", workingDir.toString() + "/",
+                    "-preparedInfoPath", preparedInfoPath
+                });
 
-        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
-        JavaRDD<Dataset> tmp = sc
-            .textFile(workingDir.toString() + "/dataset")
-            .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
+        JavaRDD<Dataset> tmp = sc
+            .textFile(workingDir.toString() + "/dataset")
+            .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
 
-        Assertions.assertEquals(10, tmp.count());
-        /**
-         * {"resultId":"50|57a035e5b1ae::d5be548ca7ae489d762f893be67af52f","communityList":["aurora"]}
-         * {"resultId":"50|57a035e5b1ae::a77232ffca9115fcad51c3503dbc7e3e","communityList":["aurora"]}
-         * {"resultId":"50|57a035e5b1ae::803aaad4decab7e27cd4b52a1931b3a1","communityList":["sdsn-gr"]}
{"resultId":"50|57a035e5b1ae::a02e9e4087bca50687731ae5c765b5e1","communityList":["netherlands"]} - */ - List context = tmp - .filter(r -> r.getId().equals("50|57a035e5b1ae::d5be548ca7ae489d762f893be67af52f")) - .first() - .getContext(); - Assertions.assertTrue(context.stream().anyMatch(c -> containsResultCommunityProject(c))); + Assertions.assertEquals(10, tmp.count()); + /** + * {"resultId":"50|57a035e5b1ae::d5be548ca7ae489d762f893be67af52f","communityList":["aurora"]} + * {"resultId":"50|57a035e5b1ae::a77232ffca9115fcad51c3503dbc7e3e","communityList":["aurora"]} + * {"resultId":"50|57a035e5b1ae::803aaad4decab7e27cd4b52a1931b3a1","communityList":["sdsn-gr"]} + * {"resultId":"50|57a035e5b1ae::a02e9e4087bca50687731ae5c765b5e1","communityList":["netherlands"]} + */ + List context = tmp + .filter(r -> r.getId().equals("50|57a035e5b1ae::d5be548ca7ae489d762f893be67af52f")) + .first() + .getContext(); + Assertions.assertTrue(context.stream().anyMatch(c -> containsResultCommunityProject(c))); - context = tmp - .filter(r -> r.getId().equals("50|57a035e5b1ae::a77232ffca9115fcad51c3503dbc7e3e")) - .first() - .getContext(); - Assertions.assertTrue(context.stream().anyMatch(c -> containsResultCommunityProject(c))); + context = tmp + .filter(r -> r.getId().equals("50|57a035e5b1ae::a77232ffca9115fcad51c3503dbc7e3e")) + .first() + .getContext(); + Assertions.assertTrue(context.stream().anyMatch(c -> containsResultCommunityProject(c))); - Assertions - .assertEquals( - 0, tmp.filter(r -> r.getId().equals("50|57a035e5b1ae::803aaad4decab7e27cd4b52a1931b3a1")).count()); + Assertions + .assertEquals( + 0, tmp.filter(r -> r.getId().equals("50|57a035e5b1ae::803aaad4decab7e27cd4b52a1931b3a1")).count()); - Assertions - .assertEquals( - 0, tmp.filter(r -> r.getId().equals("50|57a035e5b1ae::a02e9e4087bca50687731ae5c765b5e1")).count()); + Assertions + .assertEquals( + 0, tmp.filter(r -> r.getId().equals("50|57a035e5b1ae::a02e9e4087bca50687731ae5c765b5e1")).count()); - Assertions - .assertEquals( - 2, tmp.filter(r -> r.getContext().stream().anyMatch(c -> c.getId().equals("aurora"))).count()); + Assertions + .assertEquals( + 2, tmp.filter(r -> r.getContext().stream().anyMatch(c -> c.getId().equals("aurora"))).count()); - } + } - private static boolean containsResultCommunityProject(Context c) { - return c - .getDataInfo() - .stream() - .anyMatch(di -> di.getProvenanceaction().getClassid().equals("result:community:project")); - } + private static boolean containsResultCommunityProject(Context c) { + return c + .getDataInfo() + .stream() + .anyMatch(di -> di.getProvenanceaction().getClassid().equals("result:community:project")); + } }