From 271e88537bc29089611b4c3b5b14b7b552ee3eef Mon Sep 17 00:00:00 2001
From: Claudio Atzori
Date: Thu, 25 Feb 2021 12:28:56 +0100
Subject: [PATCH] code formatting

---
 .../dhp/transformation/xslt/Cleaner.java      |  8 ++--
 .../dhp/transformation/xslt/DateCleaner.java  |  4 +-
 .../transformation/TransformationJobTest.java | 43 ++++++++++---------
 3 files changed, 29 insertions(+), 26 deletions(-)

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/Cleaner.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/Cleaner.java
index 50ffd304b..664215c0e 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/Cleaner.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/Cleaner.java
@@ -1,14 +1,14 @@
 
 package eu.dnetlib.dhp.transformation.xslt;
 
+import static eu.dnetlib.dhp.transformation.xslt.XSLTTransformationFunction.QNAME_BASE_URI;
+
+import java.io.Serializable;
+
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
 import eu.dnetlib.dhp.schema.oaf.Qualifier;
 import net.sf.saxon.s9api.*;
 
-import java.io.Serializable;
-
-import static eu.dnetlib.dhp.transformation.xslt.XSLTTransformationFunction.QNAME_BASE_URI;
-
 public class Cleaner implements ExtensionFunction, Serializable {
 
 	private final VocabularyGroup vocabularies;
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/DateCleaner.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/DateCleaner.java
index 479dd9854..6e337604f 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/DateCleaner.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/DateCleaner.java
@@ -1,6 +1,8 @@
 
 package eu.dnetlib.dhp.transformation.xslt;
 
+import static eu.dnetlib.dhp.transformation.xslt.XSLTTransformationFunction.QNAME_BASE_URI;
+
 import java.io.Serializable;
 import java.time.LocalDate;
 import java.time.format.DateTimeFormatter;
@@ -10,8 +12,6 @@
 import java.util.regex.Pattern;
 
 import net.sf.saxon.s9api.*;
-import static eu.dnetlib.dhp.transformation.xslt.XSLTTransformationFunction.QNAME_BASE_URI;
-
 public class DateCleaner implements ExtensionFunction, Serializable {
 
 	private final static List<Pattern> dateRegex = Arrays
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java
index 50aa2ea08..3c0c8bf0f 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java
@@ -76,23 +76,26 @@ public class TransformationJobTest extends AbstractVocabularyTest {
 		conf.setAppName(TransformationJobTest.class.getSimpleName());
 		conf.setMaster("local");
 
-		try(SparkSession spark = SparkSession.builder().config(conf).getOrCreate()) {
+		try (SparkSession spark = SparkSession.builder().config(conf).getOrCreate()) {
 
-			final String mdstore_input = this.getClass().getResource("/eu/dnetlib/dhp/transform/mdstorenative").getFile();
+			final String mdstore_input = this
+				.getClass()
+				.getResource("/eu/dnetlib/dhp/transform/mdstorenative")
+				.getFile();
 			final String mdstore_output = testDir.toString() + "/version";
 
 			mockupTrasformationRule("simpleTRule", "/eu/dnetlib/dhp/transform/ext_simple.xsl");
 
-			final Map<String, String> parameters = Stream.of(new String[][]{
-				{
-					"dateOfTransformation", "1234"
-				},
-				{
-					"transformationPlugin", "XSLT_TRANSFORM"
-				},
-				{
-					"transformationRuleId", "simpleTRule"
-				},
+			final Map<String, String> parameters = Stream.of(new String[][] {
+				{
+					"dateOfTransformation", "1234"
+				},
+				{
+					"transformationPlugin", "XSLT_TRANSFORM"
+				},
+				{
+					"transformationRuleId", "simpleTRule"
+				},
 			}).collect(Collectors.toMap(data -> data[0], data -> data[1]));
 
@@ -102,20 +105,20 @@ public class TransformationJobTest extends AbstractVocabularyTest {
 			final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
 			final Dataset<MetadataRecord> mOutput = spark
-				.read()
-				.format("parquet")
-				.load(mdstore_output + MDSTORE_DATA_PATH)
-				.as(encoder);
+				.read()
+				.format("parquet")
+				.load(mdstore_output + MDSTORE_DATA_PATH)
+				.as(encoder);
 
 			final Long total = mOutput.count();
 
 			final long recordTs = mOutput
-				.filter((FilterFunction<MetadataRecord>) p -> p.getDateOfTransformation() == 1234)
-				.count();
+				.filter((FilterFunction<MetadataRecord>) p -> p.getDateOfTransformation() == 1234)
+				.count();
 
 			final long recordNotEmpty = mOutput
-				.filter((FilterFunction<MetadataRecord>) p -> !StringUtils.isBlank(p.getBody()))
-				.count();
+				.filter((FilterFunction<MetadataRecord>) p -> !StringUtils.isBlank(p.getBody()))
+				.count();
 
 			assertEquals(total, recordTs);