From 23b8883ab1d2a4b238182ecc3b9b50406d1499d2 Mon Sep 17 00:00:00 2001
From: Claudio Atzori
Date: Fri, 14 May 2021 10:58:12 +0200
Subject: [PATCH] applied intellij code cleanup

---
 .../java/eu/dnetlib/dhp/common/DbClient.java | 2 +-
 .../eu/dnetlib/dhp/common/MakeTarArchive.java | 2 +-
 .../common/api/InputStreamRequestBody.java | 6 +--
 .../dhp/common/rest/DNetRestClient.java | 2 +-
 .../eu/dnetlib/dhp/message/MessageSender.java | 2 +-
 .../dhp/schema/oaf/utils/OafMapperUtils.java | 6 +--
 .../dhp/utils/ISLookupClientFactory.java | 4 +-
 .../schema/oaf/utils/OafMapperUtilsTest.java | 2 +-
 .../PromoteActionPayloadForGraphTableJob.java | 4 +-
 .../bipfinder/CollectAndSave.java | 1 -
 .../bipfinder/SparkAtomicActionScoreJob.java | 2 +-
 .../project/PrepareProgramme.java | 2 +-
 .../project/PrepareProjects.java | 2 +-
 .../actionmanager/project/utils/ReadCSV.java | 3 +-
 .../project/utils/ReadExcel.java | 3 +-
 .../dhp/aggregation/common/ReportingJob.java | 2 +-
 .../CollectorWorkerApplication.java | 2 +-
 .../dhp/collection/HttpConnector2.java | 2 +-
 .../collection/plugin/oai/OaiIterator.java | 2 +-
 .../plugin/rest/RestCollectorPlugin.java | 2 +-
 .../collection/plugin/rest/RestIterator.java | 28 +++++++-------
 .../transformation/TransformSparkJobNode.java | 2 +-
 .../transformation/xslt/PersonCleaner.java | 2 +-
 .../project/EXCELParserTest.java | 2 +-
 .../plugin/rest/RestCollectorPluginTest.java | 26 ++++++-------
 .../plugin/rest/RestIteratorTest.java | 28 +++++++-------
 .../dhp/broker/oa/CheckDuplictedIdsJob.java | 26 ++++++-------
 .../dhp/broker/oa/GenerateEventsJob.java | 6 ++-
 .../dhp/broker/oa/GenerateStatsJob.java | 10 ++++-
 .../dhp/broker/oa/IndexEventSubsetJob.java | 13 +++++--
 .../dhp/broker/oa/IndexNotificationsJob.java | 23 ++++++-----
 .../dnetlib/dhp/broker/oa/JoinStep0Job.java | 9 ++++-
 .../dnetlib/dhp/broker/oa/JoinStep1Job.java | 4 +-
 .../dnetlib/dhp/broker/oa/JoinStep2Job.java | 9 ++++-
 .../dnetlib/dhp/broker/oa/JoinStep3Job.java | 4 +-
 .../dnetlib/dhp/broker/oa/JoinStep4Job.java | 4 +-
 .../dhp/broker/oa/PrepareGroupsJob.java | 8 ++--
 .../broker/oa/PrepareRelatedDatasetsJob.java | 21 ++++++----
 .../oa/PrepareRelatedDatasourcesJob.java | 38 ++++++++++++-------
 .../broker/oa/PrepareRelatedProjectsJob.java | 24 ++++++++----
 .../oa/PrepareRelatedPublicationsJob.java | 19 ++++++----
 .../broker/oa/PrepareRelatedSoftwaresJob.java | 30 ++++++++++-----
 .../broker/oa/PrepareSimpleEntititiesJob.java | 10 +++--
 .../dhp/broker/oa/util/ClusterUtils.java | 4 +-
 .../dhp/broker/oa/util/UpdateInfo.java | 1 -
 .../dhp/oa/dedup/GroupEntitiesSparkJob.java | 2 +-
 .../dhp/oa/dedup/RelationAggregator.java | 2 +-
 .../dhp/oa/dedup/SparkPropagateRelation.java | 2 +-
 .../dhp/oa/dedup/EntityMergerTest.java | 2 +-
 .../dnetlib/dhp/oa/dedup/IdGeneratorTest.java | 2 +-
 .../dhp/oa/dedup/SparkOpenorgsDedupTest.java | 8 ++--
 .../doiboost/crossref/CrossrefImporter.java | 3 +-
 .../orcid/ActivitiesDecompressor.java | 2 +-
 .../orcid/ExtractXMLActivitiesData.java | 4 +-
 .../orcid/ExtractXMLSummariesData.java | 4 +-
 .../orcid/OrcidAuthorsDOIsDataGen.java | 4 +-
 .../doiboost/orcid/OrcidDSManager.java | 4 +-
 .../orcid/SparkDownloadOrcidWorks.java | 4 +-
 .../orcid/SparkGenLastModifiedSeq.java | 2 +-
 .../orcid/SparkGenerateDoiAuthorList.java | 2 +-
 .../orcid/SparkUpdateOrcidAuthors.java | 4 +-
 .../orcid/SparkUpdateOrcidDatasets.java | 2 +-
 .../doiboost/orcid/SparkUpdateOrcidWorks.java | 2 +-
 .../doiboost/orcid/SummariesDecompressor.java | 2 +-
 .../dnetlib/doiboost/orcid/util/HDFSUtil.java | 4 +-
 .../doiboost/orcid/xml/XMLRecordParser.java | 4 +-
 .../orcidnodoi/ActivitiesDumpReader.java | 2 +-
 .../orcidnodoi/GenOrcidAuthorWork.java | 4 +-
 .../SparkGenEnrichedOrcidWorks.java | 4 +-
 .../doiboost/orcidnodoi/json/JsonWriter.java | 2 +-
 .../orcidnodoi/oaf/PublicationToOaf.java | 8 ++--
 .../orcidnodoi/similarity/AuthorMatcher.java | 9 ++---
 .../orcidnodoi/util/DumpToActionsUtility.java | 2 +-
 .../doiboost/orcidnodoi/util/Pair.java | 4 +-
 .../orcidnodoi/xml/XMLRecordParserNoDoi.java | 2 +-
 .../doiboost/orcid/ElasticSearchTest.java | 16 ++++----
 .../doiboost/orcid/OrcidClientTest.java | 8 ++--
 .../orcid/xml/XMLRecordParserTest.java | 6 +--
 .../orcidnodoi/xml/OrcidNoDoiTest.java | 32 ++++++++--------
 .../dnetlib/dhp/bulktag/SparkBulkTagJob.java | 2 +-
 .../CommunityConfigurationFactory.java | 4 +-
 .../dhp/bulktag/criteria/VerbResolver.java | 2 +-
 .../SparkCountryPropagationJob.java | 2 +-
 .../SparkOrcidToResultFromSemRelJob.java | 6 +--
 .../SparkResultToProjectThroughSemRelJob.java | 2 +-
 .../PrepareResultCommunitySet.java | 2 +-
 ...kResultToCommunityFromOrganizationJob.java | 2 +-
 ...parkResultToCommunityThroughSemRelJob.java | 2 +-
 ...arkResultToOrganizationFromIstRepoJob.java | 2 +-
 .../dnetlib/dhp/oa/graph/dump/Constants.java | 4 +-
 .../oa/graph/GraphHiveImporterJobTest.java | 2 +-
 .../dhp/oa/graph/dump/DumpJobTest.java | 2 +-
 .../dump/PrepareResultProjectJobTest.java | 2 +-
 .../oa/graph/dump/UpdateProjectInfoTest.java | 2 +-
 ...DumpOrganizationProjectDatasourceTest.java | 2 +-
 .../graph/dump/complete/DumpRelationTest.java | 2 +-
 .../RelationFromOrganizationTest.java | 2 +-
 .../ResultLinkedToProjectTest.java | 2 +-
 .../raw/MigrateDbEntitiesApplicationTest.java | 2 +-
 .../dhp/provision/update/CrossrefClient.java | 3 +-
 .../dhp/oa/provision/PrepareRelationsJob.java | 2 +-
 .../oa/provision/SolrAdminApplication.java | 4 +-
 .../dhp/oa/provision/XmlIndexingJob.java | 12 +++---
 .../provision/utils/RelationPartitioner.java | 3 +-
 .../oa/provision/utils/XmlRecordFactory.java | 2 +-
 105 files changed, 352 insertions(+), 290 deletions(-)

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/DbClient.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/DbClient.java
index cedc9bd4d..fabb25f16 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/DbClient.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/DbClient.java
@@ -14,7 +14,7 @@ public class DbClient implements Closeable {

 	private static final Log log = LogFactory.getLog(DbClient.class);

-	private Connection connection;
+	private final Connection connection;

 	public DbClient(final String address, final String login, final String password) {

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
index 76017d5b7..7dc0e4417 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
@@ -100,7 +100,7 @@ public class MakeTarArchive implements Serializable {

 			BufferedInputStream bis = new BufferedInputStream(is);

 			int count;
-			byte data[] = new byte[1024];
+			byte[] data = new byte[1024];
 			while ((count = bis.read(data, 0, data.length)) != -1) {
 				ar.write(data, 0, count);
 			}
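Note on the two hunks above, which set the tone for the whole patch: most of it applies the IntelliJ "field may be final" inspection. A field assigned exactly once, in the constructor or initializer, can be declared final; the compiler then rejects accidental reassignment and the immutability is visible at the declaration site. A minimal sketch of the idea (hypothetical class, not from this repository):

    import java.sql.Connection;

    public class FinalFieldSketch {
        // Assigned exactly once, so it qualifies for 'final'.
        private final Connection connection;

        public FinalFieldSketch(final Connection connection) {
            this.connection = connection;
            // this.connection = null;  // would no longer compile
        }
    }

The MakeTarArchive hunk shows a second, purely cosmetic inspection: 'byte[] data' replaces the C-style 'byte data[]'. Both forms are legal Java; the bracket-on-type form is the idiomatic one.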
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/InputStreamRequestBody.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/InputStreamRequestBody.java
index c3f393436..c127783e5 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/InputStreamRequestBody.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/InputStreamRequestBody.java
@@ -13,9 +13,9 @@ import okio.Source;

 public class InputStreamRequestBody extends RequestBody {

-	private InputStream inputStream;
-	private MediaType mediaType;
-	private long lenght;
+	private final InputStream inputStream;
+	private final MediaType mediaType;
+	private final long lenght;

 	public static RequestBody create(final MediaType mediaType, final InputStream inputStream, final long len) {

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/rest/DNetRestClient.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/rest/DNetRestClient.java
index 853d22bc2..98dabf56a 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/rest/DNetRestClient.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/rest/DNetRestClient.java
@@ -21,7 +21,7 @@ public class DNetRestClient {

 	private static final Logger log = LoggerFactory.getLogger(DNetRestClient.class);

-	private static ObjectMapper mapper = new ObjectMapper();
+	private static final ObjectMapper mapper = new ObjectMapper();

 	public static T doGET(final String url, Class clazz) throws Exception {
 		final HttpGet httpGet = new HttpGet(url);

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/message/MessageSender.java b/dhp-common/src/main/java/eu/dnetlib/dhp/message/MessageSender.java
index 0c6eacf99..deeda9beb 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/message/MessageSender.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/message/MessageSender.java
@@ -34,7 +34,7 @@ public class MessageSender {

 	private final String workflowId;

-	private ExecutorService executorService = Executors.newCachedThreadPool();
+	private final ExecutorService executorService = Executors.newCachedThreadPool();

 	public MessageSender(final String dnetMessageEndpoint, final String workflowId) {
 		this.workflowId = workflowId;

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
index c2dae2550..c6a8fd5a7 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
@@ -32,11 +32,11 @@ public class OafMapperUtils {
 		if (ModelSupport.isSubClass(left, Result.class)) {
 			return mergeResults((Result) left, (Result) right);
 		} else if (ModelSupport.isSubClass(left, Datasource.class)) {
-			((Datasource) left).mergeFrom((Datasource) right);
+			left.mergeFrom(right);
 		} else if (ModelSupport.isSubClass(left, Organization.class)) {
-			((Organization) left).mergeFrom((Organization) right);
+			left.mergeFrom(right);
 		} else if (ModelSupport.isSubClass(left, Project.class)) {
-			((Project) left).mergeFrom((Project) right);
+			left.mergeFrom(right);
 		} else {
 			throw new RuntimeException("invalid OafEntity subtype:" + left.getClass().getCanonicalName());
 		}

diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
index 9af390f9c..b326c4159 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
@@ -15,8 +15,8 @@ public class ISLookupClientFactory {

 	private static final Logger log = LoggerFactory.getLogger(ISLookupClientFactory.class);

-	private static int requestTimeout = 60000 * 10;
-	private static int connectTimeout = 60000 * 10;
+	private static final int requestTimeout = 60000 * 10;
+	private static final int connectTimeout = 60000 * 10;

 	public static ISLookUpService getLookUpService(final String isLookupUrl) {
 		return getServiceStub(ISLookUpService.class, isLookupUrl);

diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
index 597f7e79b..7256d6489 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
@@ -22,7 +22,7 @@ import eu.dnetlib.dhp.schema.oaf.Result;

 public class OafMapperUtilsTest {

-	private static ObjectMapper OBJECT_MAPPER = new ObjectMapper()
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

 	@Test

diff --git a/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/promote/PromoteActionPayloadForGraphTableJob.java b/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/promote/PromoteActionPayloadForGraphTableJob.java
index 0052026d4..7893fcf8b 100644
--- a/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/promote/PromoteActionPayloadForGraphTableJob.java
+++ b/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/promote/PromoteActionPayloadForGraphTableJob.java
@@ -160,9 +160,9 @@ public class PromoteActionPayloadForGraphTableJob {

 	private static String extractPayload(Row value) {
 		try {
-			return value.<String> getAs("payload");
+			return value.getAs("payload");
 		} catch (IllegalArgumentException | ClassCastException e) {
-			logger.error("cannot extract payload from action: {}", value.toString());
+			logger.error("cannot extract payload from action: {}", value);
 			throw e;
 		}
 	}

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/CollectAndSave.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/CollectAndSave.java
index 0bebe2fb0..4b9fd33f4 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/CollectAndSave.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/CollectAndSave.java
@@ -75,7 +75,6 @@ public class CollectAndSave implements Serializable {
 			.union(sc.sequenceFile(inputPath + "/otherresearchproduct", Text.class, Text.class))
 			.union(sc.sequenceFile(inputPath + "/software", Text.class, Text.class))
 			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
-		;
 	}

 	private static void removeOutputDir(SparkSession spark, String path) {

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/SparkAtomicActionScoreJob.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/SparkAtomicActionScoreJob.java
index 2cd37d9ea..cea8c2891 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/SparkAtomicActionScoreJob.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/bipfinder/SparkAtomicActionScoreJob.java
@@ -36,7 +36,7 @@ import scala.Tuple2;
 */
 public class SparkAtomicActionScoreJob implements Serializable {

-	private static String DOI = "doi";
+	private static final String DOI = "doi";
 	private static final Logger log = LoggerFactory.getLogger(SparkAtomicActionScoreJob.class);
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java
index b2d3253d5..e5a79300e 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java
@@ -249,7 +249,7 @@ public class PrepareProgramme {
 			parent = parent.substring(parent.lastIndexOf("|") + 1).trim();
 		}
 		if (current.trim().length() > parent.length()
-			&& current.toLowerCase().trim().substring(0, parent.length()).equals(parent)) {
+			&& current.toLowerCase().trim().startsWith(parent)) {
 			current = current.substring(parent.length() + 1);
 			if (current.trim().charAt(0) == '-' || current.trim().charAt(0) == '–') {
 				current = current.trim().substring(1).trim();

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java
index e5cae0ff7..3ef98e021 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java
@@ -93,7 +93,7 @@ public class PrepareProjects {
 	}

 	private static FlatMapFunction<Tuple2<String, String>, CSVProject> getTuple2CSVProjectFlatMapFunction() {
-		return (FlatMapFunction<Tuple2<String, String>, CSVProject>) value -> {
+		return value -> {
 			Optional csvProject = Optional.ofNullable(value._2());
 			List csvProjectList = new ArrayList<>();
 			if (csvProject.isPresent()) {

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadCSV.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadCSV.java
index cad6b94e1..c73f7ec3d 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadCSV.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadCSV.java
@@ -28,7 +28,7 @@ public class ReadCSV implements Closeable {
 	private final Configuration conf;
 	private final BufferedWriter writer;
 	private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-	private String csvFile;
+	private final String csvFile;

 	public static void main(final String[] args) throws Exception {
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
@@ -85,7 +85,6 @@ public class ReadCSV implements Closeable {
 		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
 		this.csvFile = httpConnector.getInputSource(fileURL);
-		;
 	}

 	protected void write(final Object p) {

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
index fc3b38ac5..f05ed9c2c 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
@@ -25,7 +25,7 @@ public class ReadExcel implements Closeable {
 	private final Configuration conf;
 	private final BufferedWriter writer;
 	private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-	private InputStream excelFile;
+	private final InputStream excelFile;

 	public static void main(final String[] args) throws Exception {
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
@@ -82,7 +82,6 @@ public class ReadExcel implements Closeable {
 		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
 		this.excelFile = httpConnector.getInputSourceAsStream(fileURL);
-		;
 	}

 	protected void write(final Object p) {

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/aggregation/common/ReportingJob.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/aggregation/common/ReportingJob.java
index 791226034..9926f1688 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/aggregation/common/ReportingJob.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/aggregation/common/ReportingJob.java
@@ -18,7 +18,7 @@ public abstract class ReportingJob {
 	 */
 	public static final int INITIAL_DELAY = 2;

-	private ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
+	private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

 	protected final AggregatorReport report;

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/CollectorWorkerApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/CollectorWorkerApplication.java
index 704e7bb93..545cbab0c 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/CollectorWorkerApplication.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/CollectorWorkerApplication.java
@@ -30,7 +30,7 @@ public class CollectorWorkerApplication {

 	private static final Logger log = LoggerFactory.getLogger(CollectorWorkerApplication.class);

-	private FileSystem fileSystem;
+	private final FileSystem fileSystem;

 	public CollectorWorkerApplication(FileSystem fileSystem) {
 		this.fileSystem = fileSystem;

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/HttpConnector2.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/HttpConnector2.java
index 9d8b8d34b..3782a19e4 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/HttpConnector2.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/HttpConnector2.java
@@ -32,7 +32,7 @@ public class HttpConnector2 {

 	private String responseType = null;

-	private String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)";
+	private final String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)";

 	public HttpConnector2() {
 		this(new HttpClientParams());

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/oai/OaiIterator.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/oai/OaiIterator.java
index 65695fe8e..12d313108 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/oai/OaiIterator.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/oai/OaiIterator.java
@@ -42,7 +42,7 @@ public class OaiIterator implements Iterator {
 	private String token;
 	private boolean started;
 	private final HttpConnector2 httpConnector;
-	private AggregatorReport report;
+	private final AggregatorReport report;

 	public OaiIterator(
 		final String baseUrl,
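Aside on the PrepareProgramme hunk above: it swaps substring(0, n).equals(prefix) for startsWith(prefix). Besides reading better, startsWith cannot throw StringIndexOutOfBoundsException when the string is shorter than the prefix (here the caller already guards the length, so the two are equivalent). A small illustration with hypothetical values, not taken from the repository:

    public class PrefixCheck {
        public static void main(String[] args) {
            String current = "H2020-EU.1.1.";
            String parent = "h2020-eu.1.";
            // Before: would throw on input shorter than 'parent'.
            boolean before = current.toLowerCase().trim().substring(0, parent.length()).equals(parent);
            // After: simply returns false on short input.
            boolean after = current.toLowerCase().trim().startsWith(parent);
            System.out.println(before + " " + after); // true true
        }
    }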
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPlugin.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPlugin.java
index e59db143a..be2bbcece 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPlugin.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPlugin.java
@@ -26,7 +26,7 @@ public class RestCollectorPlugin implements CollectorPlugin {

 	public static final String RESULT_SIZE_VALUE_DEFAULT = "100";

-	private HttpClientParams clientParams;
+	private final HttpClientParams clientParams;

 	public RestCollectorPlugin(HttpClientParams clientParams) {
 		this.clientParams = clientParams;

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestIterator.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestIterator.java
index 16d509aa8..764c21fc2 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestIterator.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/rest/RestIterator.java
@@ -48,18 +48,18 @@ public class RestIterator implements Iterator {
 	private static final Logger log = LoggerFactory.getLogger(RestIterator.class);
 	public static final String UTF_8 = "UTF-8";

-	private HttpClientParams clientParams;
+	private final HttpClientParams clientParams;

 	private final String BASIC = "basic";

-	private JsonUtils jsonUtils;
+	private final JsonUtils jsonUtils;

-	private String baseUrl;
-	private String resumptionType;
-	private String resumptionParam;
-	private String resultFormatValue;
+	private final String baseUrl;
+	private final String resumptionType;
+	private final String resumptionParam;
+	private final String resultFormatValue;
 	private String queryParams;
-	private int resultSizeValue;
+	private final int resultSizeValue;
 	private int resumptionInt = 0; // integer resumption token (first record to harvest)
 	private int resultTotal = -1;
 	private String resumptionStr = Integer.toString(resumptionInt); // string resumption token (first record to harvest
@@ -71,11 +71,11 @@ public class RestIterator implements Iterator {
 	private XPathExpression xprResultTotalPath;
 	private XPathExpression xprResumptionPath;
 	private XPathExpression xprEntity;
-	private String queryFormat;
-	private String querySize;
-	private String authMethod;
-	private String authToken;
-	private Queue recordQueue = new PriorityBlockingQueue();
+	private final String queryFormat;
+	private final String querySize;
+	private final String authMethod;
+	private final String authToken;
+	private final Queue recordQueue = new PriorityBlockingQueue();
 	private int discoverResultSize = 0;
 	private int pagination = 1;
 	/*
	 * json. useful for cases when the target API expects a resultFormatValue != json, but the results are returned in
	 * json. An example is the EU Open Data Portal API: resultFormatValue=standard, results are in json format.
	 */
-	private String resultOutputFormat;
+	private final String resultOutputFormat;

 	/** RestIterator class
	 *  compatible to version 1.3.33
@@ -229,7 +229,7 @@ public class RestIterator implements Iterator {
 			resultStream = theHttpInputStream;
 			if ("json".equals(resultOutputFormat)) {
-				resultJson = IOUtils.toString(resultStream, UTF_8);
+				resultJson = IOUtils.toString(resultStream, StandardCharsets.UTF_8);
 				resultXml = jsonUtils.convertToXML(resultJson);
 				resultStream = IOUtils.toInputStream(resultXml, UTF_8);
 			}

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java
index 4673a2394..c7201a267 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java
@@ -36,7 +36,7 @@ public class TransformSparkJobNode {

 	private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class);

-	private static int RECORDS_PER_TASK = 200;
+	private static final int RECORDS_PER_TASK = 200;

 	public static void main(String[] args) throws Exception {

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/PersonCleaner.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/PersonCleaner.java
index c4cf345f0..e3d588858 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/PersonCleaner.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/xslt/PersonCleaner.java
@@ -26,7 +26,7 @@ public class PersonCleaner implements ExtensionFunction, Serializable {
 	private List surname = Lists.newArrayList();
 	private List fullname = Lists.newArrayList();

-	private static Set particles = null;
+	private static final Set particles = null;

 	public PersonCleaner() {

diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
index acb4caa22..f0557ec4e 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
@@ -20,7 +20,7 @@ import eu.dnetlib.dhp.collection.HttpConnector2;
 public class EXCELParserTest {

 	private static Path workingDir;
-	private HttpConnector2 httpConnector = new HttpConnector2();
+	private final HttpConnector2 httpConnector = new HttpConnector2();
 	private static final String URL = "http://cordis.europa.eu/data/reference/cordisref-H2020topics.xlsx";

 	@BeforeAll

diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPluginTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPluginTest.java
index 648ac85fb..efe925175 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPluginTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestCollectorPluginTest.java
@@ -25,22 +25,22 @@ public class RestCollectorPluginTest {

 	private static final Logger log = LoggerFactory.getLogger(RestCollectorPluginTest.class);

-	private String baseUrl = "https://share.osf.io/api/v2/search/creativeworks/_search";
-	private String resumptionType = "count";
-	private String resumptionParam = "from";
-	private String entityXpath = "//hits/hits";
-	private String resumptionXpath = "//hits";
-	private String resultTotalXpath = "//hits/total";
-	private String resultFormatParam = "format";
-	private String resultFormatValue = "json";
-	private String resultSizeParam = "size";
-	private String resultSizeValue = "10";
+	private final String baseUrl = "https://share.osf.io/api/v2/search/creativeworks/_search";
+	private final String resumptionType = "count";
+	private final String resumptionParam = "from";
+	private final String entityXpath = "//hits/hits";
+	private final String resumptionXpath = "//hits";
+	private final String resultTotalXpath = "//hits/total";
+	private final String resultFormatParam = "format";
+	private final String resultFormatValue = "json";
+	private final String resultSizeParam = "size";
+	private final String resultSizeValue = "10";

 	// private String query = "q=%28sources%3ASocArXiv+AND+type%3Apreprint%29";
-	private String query = "q=%28sources%3AengrXiv+AND+type%3Apreprint%29";
+	private final String query = "q=%28sources%3AengrXiv+AND+type%3Apreprint%29";
 	// private String query = "=(sources:engrXiv AND type:preprint)";

-	private String protocolDescriptor = "rest_json2xml";
-	private ApiDescriptor api = new ApiDescriptor();
+	private final String protocolDescriptor = "rest_json2xml";
+	private final ApiDescriptor api = new ApiDescriptor();
 	private RestCollectorPlugin rcp;

 	@BeforeEach

diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestIteratorTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestIteratorTest.java
index 16604e0eb..9f75bd468 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestIteratorTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/plugin/rest/RestIteratorTest.java
@@ -20,20 +20,20 @@ public class RestIteratorTest {

 	private static final Logger log = LoggerFactory.getLogger(RestIteratorTest.class);

-	private String baseUrl = "https://share.osf.io/api/v2/search/creativeworks/_search";
-	private String resumptionType = "count";
-	private String resumptionParam = "from";
-	private String resumptionXpath = "";
-	private String resultTotalXpath = "//hits/total";
-	private String entityXpath = "//hits/hits";
-	private String resultFormatParam = "format";
-	private String resultFormatValue = "Json"; // Change from lowerCase to one UpperCase
-	private String resultSizeParam = "size";
-	private String resultSizeValue = "10";
-	private String authMethod = "";
-	private String authToken = "";
-	private String resultOffsetParam = "cursor";
-	private String query = "q=%28sources%3ASocArXiv+AND+type%3Apreprint%29";
+	private final String baseUrl = "https://share.osf.io/api/v2/search/creativeworks/_search";
+	private final String resumptionType = "count";
+	private final String resumptionParam = "from";
+	private final String resumptionXpath = "";
+	private final String resultTotalXpath = "//hits/total";
+	private final String entityXpath = "//hits/hits";
+	private final String resultFormatParam = "format";
+	private final String resultFormatValue = "Json"; // Change from lowerCase to one UpperCase
+	private final String resultSizeParam = "size";
+	private final String resultSizeValue = "10";
+	private final String authMethod = "";
+	private final String authToken = "";
+	private final String resultOffsetParam = "cursor";
+	private final String query = "q=%28sources%3ASocArXiv+AND+type%3Apreprint%29";

 	@Disabled
 	@Test

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/CheckDuplictedIdsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/CheckDuplictedIdsJob.java
index d42c692f7..89fc2e703 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/CheckDuplictedIdsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/CheckDuplictedIdsJob.java
@@ -4,6 +4,8 @@ package eu.dnetlib.dhp.broker.oa;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
@@ -47,26 +49,22 @@ public class CheckDuplictedIdsJob {

 		final LongAccumulator total = spark.sparkContext().longAccumulator("invaild_event_id");

-		final TypedColumn<Tuple2<String, Long>, Tuple2<String, Long>> agg = new CountAggregator().toColumn();
-
+		final Encoder<Tuple2<String, Long>> encoder = Encoders.tuple(Encoders.STRING(), Encoders.LONG());
 		ClusterUtils
 			.readPath(spark, eventsPath, Event.class)
-			.map(e -> new Tuple2<>(e.getEventId(), 1l), Encoders.tuple(Encoders.STRING(), Encoders.LONG()))
-			.groupByKey(t -> t._1, Encoders.STRING())
-			.agg(agg)
-			.map(t -> t._2, Encoders.tuple(Encoders.STRING(), Encoders.LONG()))
-			.filter(t -> t._2 > 1)
-			.map(o -> ClusterUtils.incrementAccumulator(o, total), Encoders.tuple(Encoders.STRING(), Encoders.LONG()))
+			.map((MapFunction<Event, Tuple2<String, Long>>) e -> new Tuple2<>(e.getEventId(), 1l), encoder)
+			.groupByKey((MapFunction<Tuple2<String, Long>, String>) t -> t._1, Encoders.STRING())
+			.agg(new CountAggregator().toColumn())
+			.map((MapFunction<Tuple2<String, Tuple2<String, Long>>, Tuple2<String, Long>>) t -> t._2, encoder)
+			.filter((FilterFunction<Tuple2<String, Long>>) t -> t._2 > 1)
+			.map(
+				(MapFunction<Tuple2<String, Long>, Tuple2<String, Long>>) o -> ClusterUtils
					.incrementAccumulator(o, total),
+				encoder)
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(countPath);
-		;
-
-	}
-
-	private static String eventAsJsonString(final Event f) throws JsonProcessingException {
-		return new ObjectMapper().writeValueAsString(f);
 	}
 }
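The broker-events hunks that follow all decorate lambdas with explicit (MapFunction) / (FilterFunction) casts. Dataset.map and Dataset.filter are overloaded for both Scala function types and Java's org.apache.spark.api.java.function interfaces, so an undecorated Java lambda can be ambiguous to javac; the cast pins the Java overload that takes an Encoder. A self-contained sketch of the idiom under those assumptions (local session, toy data, hypothetical class name):

    import org.apache.spark.api.java.function.FilterFunction;
    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;

    import java.util.Arrays;

    public class CastSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local").appName("cast-sketch").getOrCreate();
            Dataset<String> ids = spark.createDataset(Arrays.asList("a", "bb", "ccc"), Encoders.STRING());
            Dataset<Integer> lengths = ids
                .filter((FilterFunction<String>) s -> !s.isEmpty())              // selects the Java overload
                .map((MapFunction<String, Integer>) String::length, Encoders.INT()); // typed result needs an Encoder
            lengths.show();
            spark.stop();
        }
    }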
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsJob.java
index 1ae241e34..c4813984c 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsJob.java
@@ -12,6 +12,8 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.util.LongAccumulator;
@@ -77,11 +79,11 @@ public class GenerateEventsJob {

 		final Dataset dataset = groups
 			.map(
-				g -> EventFinder
+				(MapFunction) g -> EventFinder
 					.generateEvents(g, dsIdWhitelist, dsIdBlacklist, dsTypeWhitelist, topicWhitelist, accumulators),
 				Encoders
 					.bean(EventGroup.class))
-			.flatMap(g -> g.getData().iterator(), Encoders.bean(Event.class));
+			.flatMap((FlatMapFunction) g -> g.getData().iterator(), Encoders.bean(Event.class));

 		ClusterUtils.save(dataset, eventsPath, Event.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateStatsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateStatsJob.java
index 2772f8fd1..a4fb20b1c 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateStatsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateStatsJob.java
@@ -13,6 +13,7 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.TypedColumn;
@@ -24,6 +25,7 @@ import eu.dnetlib.dhp.broker.model.Event;
 import eu.dnetlib.dhp.broker.oa.util.ClusterUtils;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.stats.DatasourceStats;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.stats.StatsAggregator;
+import scala.Tuple2;

 public class GenerateStatsJob {

@@ -71,9 +73,13 @@ public class GenerateStatsJob {

 		ClusterUtils
 			.readPath(spark, eventsPath, Event.class)
-			.groupByKey(e -> e.getTopic() + "@@@" + e.getMap().getTargetDatasourceId(), Encoders.STRING())
+			.groupByKey(
+				(MapFunction) e -> e.getTopic() + "@@@" + e.getMap().getTargetDatasourceId(),
+				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(DatasourceStats.class))
+			.map(
+				(MapFunction<Tuple2<String, DatasourceStats>, DatasourceStats>) t -> t._2,
+				Encoders.bean(DatasourceStats.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.jdbc(dbUrl, "oa_datasource_stats_temp", connectionProperties);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexEventSubsetJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexEventSubsetJob.java
index e18a7ef56..05ff2aa38 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexEventSubsetJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexEventSubsetJob.java
@@ -13,6 +13,8 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
@@ -30,6 +32,7 @@ import eu.dnetlib.dhp.broker.model.Event;
 import eu.dnetlib.dhp.broker.oa.util.ClusterUtils;
 import eu.dnetlib.dhp.broker.oa.util.EventGroup;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.subset.EventSubsetAggregator;
+import scala.Tuple2;

 public class IndexEventSubsetJob {

@@ -83,13 +86,15 @@ public class IndexEventSubsetJob {

 		final Dataset subset = ClusterUtils
 			.readPath(spark, eventsPath, Event.class)
-			.groupByKey(e -> e.getTopic() + '@' + e.getMap().getTargetDatasourceId(), Encoders.STRING())
+			.groupByKey(
+				(MapFunction) e -> e.getTopic() + '@' + e.getMap().getTargetDatasourceId(),
+				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(EventGroup.class))
-			.flatMap(g -> g.getData().iterator(), Encoders.bean(Event.class));
+			.map((MapFunction<Tuple2<String, EventGroup>, EventGroup>) t -> t._2, Encoders.bean(EventGroup.class))
+			.flatMap((FlatMapFunction) g -> g.getData().iterator(), Encoders.bean(Event.class));

 		final JavaRDD inputRdd = subset
-			.map(e -> prepareEventForIndexing(e, now, total), Encoders.STRING())
+			.map((MapFunction) e -> prepareEventForIndexing(e, now, total), Encoders.STRING())
 			.javaRDD();

 		final Map esCfg = new HashMap<>();

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexNotificationsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexNotificationsJob.java
index 75f4eb066..80549e1ce 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexNotificationsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/IndexNotificationsJob.java
@@ -18,7 +18,10 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
@@ -89,13 +92,17 @@ public class IndexNotificationsJob {
 			log.info("Number of subscriptions: " + subscriptions.size());

 			if (subscriptions.size() > 0) {
+				final Encoder ngEncoder = Encoders.bean(NotificationGroup.class);
+				final Encoder nEncoder = Encoders.bean(Notification.class);
 				final Dataset notifications = ClusterUtils
 					.readPath(spark, eventsPath, Event.class)
-					.map(e -> generateNotifications(e, subscriptions, startTime), Encoders.bean(NotificationGroup.class))
-					.flatMap(g -> g.getData().iterator(), Encoders.bean(Notification.class));
+					.map(
+						(MapFunction) e -> generateNotifications(e, subscriptions, startTime),
+						ngEncoder)
+					.flatMap((FlatMapFunction) g -> g.getData().iterator(), nEncoder);

 				final JavaRDD inputRdd = notifications
-					.map(n -> prepareForIndexing(n, total), Encoders.STRING())
+					.map((MapFunction) n -> prepareForIndexing(n, total), Encoders.STRING())
 					.javaRDD();

 				final Map esCfg = new HashMap<>();
@@ -192,15 +199,11 @@ public class IndexNotificationsJob {
 			return false;
 		}

-		if (conditions.containsKey("targetSubjects")
-			&& !conditions
+		return !conditions.containsKey("targetSubjects")
+			|| conditions
 				.get("targetSubjects")
 				.stream()
-				.allMatch(c -> SubscriptionUtils.verifyListExact(map.getTargetSubjects(), c.getValue()))) {
-			return false;
-		}
-
-		return true;
+				.allMatch(c -> SubscriptionUtils.verifyListExact(map.getTargetSubjects(), c.getValue()));
 	}

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep0Job.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep0Job.java
index 01778ad74..9bbb952e0 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep0Job.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep0Job.java
@@ -7,6 +7,7 @@ import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.TypedColumn;
@@ -67,9 +68,13 @@ public class JoinStep0Job {
 		final Dataset dataset = sources
 			.joinWith(typedRels, sources.col("openaireId").equalTo(typedRels.col("source")), "left_outer")
-			.groupByKey(t -> t._1.getOpenaireId(), Encoders.STRING())
+			.groupByKey(
+				(MapFunction<Tuple2<OaBrokerMainEntity, RelatedDatasource>, String>) t -> t._1.getOpenaireId(),
+				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(OaBrokerMainEntity.class));
+			.map(
+				(MapFunction<Tuple2<String, OaBrokerMainEntity>, OaBrokerMainEntity>) t -> t._2,
+				Encoders.bean(OaBrokerMainEntity.class));

 		ClusterUtils.save(dataset, joinedEntitiesPath, OaBrokerMainEntity.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep1Job.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep1Job.java
index 82c3619e1..ea3587c8d 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep1Job.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep1Job.java
@@ -69,7 +69,9 @@ public class JoinStep1Job {
 				(MapFunction<Tuple2<OaBrokerMainEntity, RelatedProject>, String>) t -> t._1.getOpenaireId(),
 				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(OaBrokerMainEntity.class));
+			.map(
+				(MapFunction<Tuple2<String, OaBrokerMainEntity>, OaBrokerMainEntity>) t -> t._2,
+				Encoders.bean(OaBrokerMainEntity.class));

 		ClusterUtils.save(dataset, joinedEntitiesPath, OaBrokerMainEntity.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep2Job.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep2Job.java
index bd6135d41..42c5b1c7e 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep2Job.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep2Job.java
@@ -7,6 +7,7 @@ import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.TypedColumn;
@@ -64,9 +65,13 @@ public class JoinStep2Job {
 		final Dataset dataset = sources
 			.joinWith(typedRels, sources.col("openaireId").equalTo(typedRels.col("source")), "left_outer")
-			.groupByKey(t -> t._1.getOpenaireId(), Encoders.STRING())
+			.groupByKey(
+				(MapFunction<Tuple2<OaBrokerMainEntity, RelatedSoftware>, String>) t -> t._1.getOpenaireId(),
+				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(OaBrokerMainEntity.class));
+			.map(
+				(MapFunction<Tuple2<String, OaBrokerMainEntity>, OaBrokerMainEntity>) t -> t._2,
+				Encoders.bean(OaBrokerMainEntity.class));

 		ClusterUtils.save(dataset, joinedEntitiesPath, OaBrokerMainEntity.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep3Job.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep3Job.java
index 18e8c00b2..f329aa968 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep3Job.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep3Job.java
@@ -69,7 +69,9 @@ public class JoinStep3Job {
 				(MapFunction<Tuple2<OaBrokerMainEntity, RelatedDataset>, String>) t -> t._1.getOpenaireId(),
 				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(OaBrokerMainEntity.class));
+			.map(
+				(MapFunction<Tuple2<String, OaBrokerMainEntity>, OaBrokerMainEntity>) t -> t._2,
+				Encoders.bean(OaBrokerMainEntity.class));

 		ClusterUtils.save(dataset, joinedEntitiesPath, OaBrokerMainEntity.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep4Job.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep4Job.java
index 965530362..65a3307a7 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep4Job.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/JoinStep4Job.java
@@ -69,7 +69,9 @@ public class JoinStep4Job {
 				(MapFunction<Tuple2<OaBrokerMainEntity, RelatedPublication>, String>) t -> t._1.getOpenaireId(),
 				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(OaBrokerMainEntity.class));
+			.map(
+				(MapFunction<Tuple2<String, OaBrokerMainEntity>, OaBrokerMainEntity>) t -> t._2,
+				Encoders.bean(OaBrokerMainEntity.class));

 		ClusterUtils.save(dataset, joinedEntitiesPath, OaBrokerMainEntity.class, total);

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareGroupsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareGroupsJob.java
index dc156cbcf..80abe7c5a 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareGroupsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareGroupsJob.java
@@ -7,6 +7,7 @@ import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
@@ -64,7 +65,7 @@ public class PrepareGroupsJob {

 		final Dataset mergedRels = ClusterUtils
 			.loadRelations(graphPath, spark)
-			.filter(r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
+			.filter((FilterFunction) r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));

 		final TypedColumn<Tuple2<OaBrokerMainEntity, Relation>, ResultGroup> aggr = new ResultAggregator()
 			.toColumn();
@@ -75,8 +76,9 @@ public class PrepareGroupsJob {
 				(MapFunction<Tuple2<OaBrokerMainEntity, Relation>, String>) t -> t._2.getTarget(),
 				Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.bean(ResultGroup.class))
-			.filter(rg -> rg.getData().size() > 1);
+			.map(
+				(MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2, Encoders.bean(ResultGroup.class))
+			.filter((FilterFunction) rg -> rg.getData().size() > 1);

 		ClusterUtils.save(dataset, groupsPath, ResultGroup.class, total);
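The JoinStep* and PrepareGroups jobs above all share one shape: groupByKey on an id, agg with a custom org.apache.spark.sql.expressions.Aggregator turned into a TypedColumn, then map over the resulting Tuple2 to keep only the aggregated value. A stripped-down sketch of that shape with a toy counting aggregator (hypothetical names, standing in for ResultAggregator and friends):

    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoder;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;
    import org.apache.spark.sql.TypedColumn;
    import org.apache.spark.sql.expressions.Aggregator;

    import java.util.Arrays;

    import scala.Tuple2;

    public class AggSketch {
        // Toy aggregator: counts elements per key.
        public static class CountAgg extends Aggregator<String, Long, Long> {
            @Override public Long zero() { return 0L; }
            @Override public Long reduce(Long b, String a) { return b + 1; }
            @Override public Long merge(Long b1, Long b2) { return b1 + b2; }
            @Override public Long finish(Long r) { return r; }
            @Override public Encoder<Long> bufferEncoder() { return Encoders.LONG(); }
            @Override public Encoder<Long> outputEncoder() { return Encoders.LONG(); }
        }

        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local").appName("agg-sketch").getOrCreate();
            Dataset<String> ids = spark.createDataset(Arrays.asList("x", "x", "y"), Encoders.STRING());
            TypedColumn<String, Long> agg = new CountAgg().toColumn();
            Dataset<Long> counts = ids
                .groupByKey((MapFunction<String, String>) s -> s, Encoders.STRING())
                .agg(agg)                                                        // Dataset<Tuple2<key, aggregate>>
                .map((MapFunction<Tuple2<String, Long>, Long>) t -> t._2, Encoders.LONG());
            counts.show(); // 2 and 1, order not guaranteed
            spark.stop();
        }
    }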
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasetsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasetsJob.java
index 9bdf32a64..260d85b10 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasetsJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasetsJob.java
@@ -7,6 +7,8 @@ import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.util.LongAccumulator;
@@ -20,6 +22,7 @@ import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedDataset;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
+import scala.Tuple2;

 public class PrepareRelatedDatasetsJob {

@@ -58,20 +61,22 @@ public class PrepareRelatedDatasetsJob {

 		final Dataset datasets = ClusterUtils
 			.readPath(spark, graphPath + "/dataset", eu.dnetlib.dhp.schema.oaf.Dataset.class)
-			.filter(d -> !ClusterUtils.isDedupRoot(d.getId()))
-			.map(ConversionUtils::oafDatasetToBrokerDataset, Encoders.bean(OaBrokerRelatedDataset.class));
+			.filter((FilterFunction) d -> !ClusterUtils.isDedupRoot(d.getId()))
+			.map(
+				(MapFunction) ConversionUtils::oafDatasetToBrokerDataset,
+				Encoders.bean(OaBrokerRelatedDataset.class));

 		final Dataset rels = ClusterUtils
 			.loadRelations(graphPath, spark)
-			.filter(r -> r.getDataInfo().getDeletedbyinference())
-			.filter(r -> r.getRelType().equals(ModelConstants.RESULT_RESULT))
-			.filter(r -> ClusterUtils.isValidResultResultClass(r.getRelClass()))
-			.filter(r -> !ClusterUtils.isDedupRoot(r.getSource()))
-			.filter(r -> !ClusterUtils.isDedupRoot(r.getTarget()));
+			.filter((FilterFunction) r -> r.getDataInfo().getDeletedbyinference())
+			.filter((FilterFunction) r -> r.getRelType().equals(ModelConstants.RESULT_RESULT))
+			.filter((FilterFunction) r -> ClusterUtils.isValidResultResultClass(r.getRelClass()))
+			.filter((FilterFunction) r -> !ClusterUtils.isDedupRoot(r.getSource()))
+			.filter((FilterFunction) r -> !ClusterUtils.isDedupRoot(r.getTarget()));

 		final Dataset dataset = rels
 			.joinWith(datasets, datasets.col("openaireId").equalTo(rels.col("target")), "inner")
-			.map(t -> {
+			.map((MapFunction<Tuple2<Relation, OaBrokerRelatedDataset>, RelatedDataset>) t -> {
 				final RelatedDataset rel = new RelatedDataset(t._1.getSource(), t._2);
 				rel.getRelDataset().setRelType(t._1.getRelClass());

diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasourcesJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasourcesJob.java
index 0c2318127..61ab5e250 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasourcesJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedDatasourcesJob.java
@@ -7,6 +7,9 @@ import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
@@ -25,6 +28,7 @@ import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.Software;
+import scala.Tuple2;
 import scala.Tuple3;

 public class PrepareRelatedDatasourcesJob {

@@ -70,17 +74,20 @@ public class PrepareRelatedDatasourcesJob {

 		final Dataset datasources = ClusterUtils
 			.readPath(spark, graphPath + "/datasource", Datasource.class)
-			.map(ConversionUtils::oafDatasourceToBrokerDatasource, Encoders.bean(OaBrokerRelatedDatasource.class));
+			.map(
+				(MapFunction) ConversionUtils::oafDatasourceToBrokerDatasource,
+				Encoders.bean(OaBrokerRelatedDatasource.class));

 		final Dataset dataset = rels
 			.joinWith(datasources, datasources.col("openaireId").equalTo(rels.col("_2")), "inner")
-			.map(t -> {
-				final RelatedDatasource r = new RelatedDatasource();
-				r.setSource(t._1._1());
-				r.setRelDatasource(t._2);
-				r.getRelDatasource().setRelType(t._1._3());
-				return r;
-			}, Encoders.bean(RelatedDatasource.class));
+			.map(
+				(MapFunction<Tuple2<Tuple3<String, String, String>, OaBrokerRelatedDatasource>, RelatedDatasource>) t -> {
+					final RelatedDatasource r = new RelatedDatasource();
+					r.setSource(t._1._1());
+					r.setRelDatasource(t._2);
+					r.getRelDatasource().setRelType(t._1._3());
+					return r;
+				}, Encoders.bean(RelatedDatasource.class));

 		ClusterUtils.save(dataset, relsPath, RelatedDatasource.class, total);

@@ -88,19 +95,22 @@ public class PrepareRelatedDatasourcesJob {
 	}

-	private static final Dataset<Tuple3<String, String, String>> prepareResultTuples(final SparkSession spark,
+	private static final Dataset<Tuple3<String, String, String>> prepareResultTuples(
+		final SparkSession spark,
 		final String graphPath,
-		final Class sourceClass) {
+		final Class sourceClass) {

 		return ClusterUtils
 			.readPath(spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), sourceClass)
-			.filter(r -> !ClusterUtils.isDedupRoot(r.getId()))
-			.filter(r -> r.getDataInfo().getDeletedbyinference())
+			.filter((FilterFunction) r -> !ClusterUtils.isDedupRoot(r.getId()))
+			.filter((FilterFunction) r -> r.getDataInfo().getDeletedbyinference())
 			.map(
-				r -> DatasourceRelationsAccumulator.calculateTuples(r),
+				(MapFunction) r -> DatasourceRelationsAccumulator.calculateTuples(r),
 				Encoders.bean(DatasourceRelationsAccumulator.class))
 			.flatMap(
-				acc -> acc.getRels().iterator(),
+				(FlatMapFunction<DatasourceRelationsAccumulator, Tuple3<String, String, String>>) acc -> acc
+					.getRels()
+					.iterator(),
 				Encoders.tuple(Encoders.STRING(), Encoders.STRING(), Encoders.STRING()));
 	}
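The PrepareRelated* jobs above pair a Dataset of relations with a Dataset of entities via joinWith, then map each Tuple2 into a small wrapper bean. Unlike join, joinWith keeps both sides as typed objects instead of flattening them into a Row. A compact sketch of the join-then-wrap shape, under assumed toy data (tuples standing in for Relation and the OaBroker* beans):

    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;

    import java.util.Arrays;

    import scala.Tuple2;

    public class JoinWithSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local").appName("joinwith-sketch").getOrCreate();
            // Stand-ins for the relation (source -> target) and the entity (id) datasets.
            Dataset<Tuple2<String, String>> rels = spark
                .createDataset(Arrays.asList(new Tuple2<>("s1", "t1")), Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
            Dataset<String> entities = spark.createDataset(Arrays.asList("t1"), Encoders.STRING());
            // joinWith keeps both sides typed: Dataset<Tuple2<relation, entity>>.
            Dataset<String> wrapped = rels
                .joinWith(entities, entities.col("value").equalTo(rels.col("_2")), "inner")
                .map(
                    (MapFunction<Tuple2<Tuple2<String, String>, String>, String>) t -> t._1._1 + " -> " + t._2,
                    Encoders.STRING());
            wrapped.show();
            spark.stop();
        }
    }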
RelatedProject(t._1.getSource(), t._2), Encoders.bean(RelatedProject.class)); + .map( + (MapFunction, RelatedProject>) t -> new RelatedProject( + t._1.getSource(), t._2), + Encoders.bean(RelatedProject.class)); ClusterUtils.save(dataset, relsPath, RelatedProject.class, total); diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedPublicationsJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedPublicationsJob.java index 8270500fd..9e9261731 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedPublicationsJob.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedPublicationsJob.java @@ -7,6 +7,8 @@ import java.util.Optional; import org.apache.commons.io.IOUtils; import org.apache.spark.SparkConf; +import org.apache.spark.api.java.function.FilterFunction; +import org.apache.spark.api.java.function.MapFunction; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; import org.apache.spark.util.LongAccumulator; @@ -21,6 +23,7 @@ import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedPublication; import eu.dnetlib.dhp.schema.common.ModelConstants; import eu.dnetlib.dhp.schema.oaf.Publication; import eu.dnetlib.dhp.schema.oaf.Relation; +import scala.Tuple2; public class PrepareRelatedPublicationsJob { @@ -59,22 +62,22 @@ public class PrepareRelatedPublicationsJob { final Dataset pubs = ClusterUtils .readPath(spark, graphPath + "/publication", Publication.class) - .filter(p -> !ClusterUtils.isDedupRoot(p.getId())) + .filter((FilterFunction) p -> !ClusterUtils.isDedupRoot(p.getId())) .map( - ConversionUtils::oafPublicationToBrokerPublication, + (MapFunction) ConversionUtils::oafPublicationToBrokerPublication, Encoders.bean(OaBrokerRelatedPublication.class)); final Dataset rels = ClusterUtils .loadRelations(graphPath, spark) - .filter(r -> r.getDataInfo().getDeletedbyinference()) - .filter(r -> r.getRelType().equals(ModelConstants.RESULT_RESULT)) - .filter(r -> ClusterUtils.isValidResultResultClass(r.getRelClass())) - .filter(r -> !ClusterUtils.isDedupRoot(r.getSource())) - .filter(r -> !ClusterUtils.isDedupRoot(r.getTarget())); + .filter((FilterFunction) r -> r.getDataInfo().getDeletedbyinference()) + .filter((FilterFunction) r -> r.getRelType().equals(ModelConstants.RESULT_RESULT)) + .filter((FilterFunction) r -> ClusterUtils.isValidResultResultClass(r.getRelClass())) + .filter((FilterFunction) r -> !ClusterUtils.isDedupRoot(r.getSource())) + .filter((FilterFunction) r -> !ClusterUtils.isDedupRoot(r.getTarget())); final Dataset dataset = rels .joinWith(pubs, pubs.col("openaireId").equalTo(rels.col("target")), "inner") - .map(t -> { + .map((MapFunction, RelatedPublication>) t -> { final RelatedPublication rel = new RelatedPublication( t._1.getSource(), t._2); rel.getRelPublication().setRelType(t._1.getRelClass()); diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java index 16b450733..a1bb12c56 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java @@ -7,7 +7,10 @@ import java.util.Optional; import org.apache.commons.io.IOUtils; import org.apache.spark.SparkConf; 
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java
index 16b450733..a1bb12c56 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareRelatedSoftwaresJob.java
@@ -7,7 +7,10 @@ import java.util.Optional;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.util.LongAccumulator;
 import org.slf4j.Logger;
@@ -22,6 +25,7 @@ import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Software;
+import scala.Tuple2;
 
 public class PrepareRelatedSoftwaresJob {
 
@@ -58,22 +62,30 @@ public class PrepareRelatedSoftwaresJob {
 
 			final LongAccumulator total = spark.sparkContext().longAccumulator("total_rels");
 
+			final Encoder<OaBrokerRelatedSoftware> obrsEncoder = Encoders.bean(OaBrokerRelatedSoftware.class);
 			final Dataset<OaBrokerRelatedSoftware> softwares = ClusterUtils
 				.readPath(spark, graphPath + "/software", Software.class)
-				.filter(sw -> !ClusterUtils.isDedupRoot(sw.getId()))
-				.map(ConversionUtils::oafSoftwareToBrokerSoftware, Encoders.bean(OaBrokerRelatedSoftware.class));
+				.filter((FilterFunction<Software>) sw -> !ClusterUtils.isDedupRoot(sw.getId()))
+				.map(
+					(MapFunction<Software, OaBrokerRelatedSoftware>) ConversionUtils::oafSoftwareToBrokerSoftware,
+					obrsEncoder);
 
-			final Dataset<Relation> rels = ClusterUtils
+			final Dataset<Relation> rels;
+			rels = ClusterUtils
 				.loadRelations(graphPath, spark)
-				.filter(r -> r.getDataInfo().getDeletedbyinference())
-				.filter(r -> r.getRelType().equals(ModelConstants.RESULT_RESULT))
-				.filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS))
-				.filter(r -> !ClusterUtils.isDedupRoot(r.getSource()))
-				.filter(r -> !ClusterUtils.isDedupRoot(r.getTarget()));
+				.filter((FilterFunction<Relation>) r -> r.getDataInfo().getDeletedbyinference())
+				.filter((FilterFunction<Relation>) r -> r.getRelType().equals(ModelConstants.RESULT_RESULT))
+				.filter((FilterFunction<Relation>) r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS))
+				.filter((FilterFunction<Relation>) r -> !ClusterUtils.isDedupRoot(r.getSource()))
+				.filter((FilterFunction<Relation>) r -> !ClusterUtils.isDedupRoot(r.getTarget()));
 
+			final Encoder<RelatedSoftware> rsEncoder = Encoders.bean(RelatedSoftware.class);
 			final Dataset<RelatedSoftware> dataset = rels
 				.joinWith(softwares, softwares.col("openaireId").equalTo(rels.col("target")), "inner")
-				.map(t -> new RelatedSoftware(t._1.getSource(), t._2), Encoders.bean(RelatedSoftware.class));
+				.map(
+					(MapFunction<Tuple2<Relation, OaBrokerRelatedSoftware>, RelatedSoftware>) t -> new RelatedSoftware(
+						t._1.getSource(), t._2),
+					rsEncoder);
 
 			ClusterUtils.save(dataset, relsPath, RelatedSoftware.class, total);
 
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareSimpleEntititiesJob.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareSimpleEntititiesJob.java
index cf4450603..4cb3ecfd9 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareSimpleEntititiesJob.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/PrepareSimpleEntititiesJob.java
@@ -7,7 +7,10 @@ import java.util.Optional;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
@@ -73,11 +76,12 @@ public class PrepareSimpleEntititiesJob {
 		final String graphPath,
 		final Class<SRC> sourceClass) {
 
+		final Encoder<OaBrokerMainEntity> encoder = Encoders.bean(OaBrokerMainEntity.class);
 		return ClusterUtils
 			.readPath(spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), sourceClass)
-			.filter(r -> !ClusterUtils.isDedupRoot(r.getId()))
-			.filter(r -> r.getDataInfo().getDeletedbyinference())
-			.map(ConversionUtils::oafResultToBrokerResult, Encoders.bean(OaBrokerMainEntity.class));
+			.filter((FilterFunction<SRC>) r -> !ClusterUtils.isDedupRoot(r.getId()))
+			.filter((FilterFunction<SRC>) r -> r.getDataInfo().getDeletedbyinference())
+			.map((MapFunction<SRC, OaBrokerMainEntity>) ConversionUtils::oafResultToBrokerResult, encoder);
 	}
 
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ClusterUtils.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ClusterUtils.java
index 9ce64f6bd..c7be633a9 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ClusterUtils.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ClusterUtils.java
@@ -34,7 +34,7 @@ public class ClusterUtils {
 	public static Dataset<Relation> loadRelations(final String graphPath, final SparkSession spark) {
 		return ClusterUtils
 			.readPath(spark, graphPath + "/relation", Relation.class)
-			.map(r -> {
+			.map((MapFunction<Relation, Relation>) r -> {
 				r.setSource(ConversionUtils.cleanOpenaireId(r.getSource()));
 				r.setTarget(ConversionUtils.cleanOpenaireId(r.getTarget()));
 				return r;
@@ -75,7 +75,7 @@ public class ClusterUtils {
 		final Class<T> clazz,
 		final LongAccumulator acc) {
 		dataset
-			.map(o -> ClusterUtils.incrementAccumulator(o, acc), Encoders.bean(clazz))
+			.map((MapFunction<T, T>) o -> ClusterUtils.incrementAccumulator(o, acc), Encoders.bean(clazz))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
index fca954247..5a9cb5e09 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
@@ -105,7 +105,6 @@ public final class UpdateInfo {
 			.map(OaBrokerInstance::getUrl)
 			.findFirst()
 			.orElse(null);
-		;
 
 		final OaBrokerProvenance provenance = new OaBrokerProvenance(provId, provRepo, provType, provUrl);
 
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java
index 80d25da4a..58009bfcf 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java
@@ -44,7 +44,7 @@ public class GroupEntitiesSparkJob {
 
 	private final static String ID_JPATH = "$.id";
 
-	private static ObjectMapper OBJECT_MAPPER = new ObjectMapper()
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
 	public static void main(String[] args) throws Exception {
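The `static final` constant pattern applied here, and again in IdGeneratorTest below, is worth a short sketch: a configured Jackson ObjectMapper is safe to share across threads, and `final` lets the compiler guarantee that the reference is assigned exactly once. Class and method names below are illustrative, not taken from this codebase.

import java.io.IOException;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class MapperHolder {

	// one shared, immutable mapper reference; configured once, reused everywhere
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

	public static <T> T parse(final String json, final Class<T> clazz) throws IOException {
		return OBJECT_MAPPER.readValue(json, clazz);
	}
}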
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/RelationAggregator.java b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/RelationAggregator.java
index 6fb7b844b..96d783dbf 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/RelationAggregator.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/RelationAggregator.java
@@ -11,7 +11,7 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 
 public class RelationAggregator extends Aggregator<Relation, Relation, Relation> {
 
-	private static Relation ZERO = new Relation();
+	private static final Relation ZERO = new Relation();
 
 	@Override
 	public Relation zero() {
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
index 1fe83cec2..220b0f483 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
@@ -161,7 +161,7 @@ public class SparkPropagateRelation extends AbstractSparkAction {
 	}
 
 	private FilterFunction<Relation> getRelationFilterFunction() {
-		return (FilterFunction<Relation>) r -> StringUtils.isNotBlank(r.getSource()) ||
+		return r -> StringUtils.isNotBlank(r.getSource()) ||
 			StringUtils.isNotBlank(r.getTarget()) ||
 			StringUtils.isNotBlank(r.getRelClass()) ||
 			StringUtils.isNotBlank(r.getSubRelType()) ||
diff --git a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/EntityMergerTest.java b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/EntityMergerTest.java
index 654ede498..80154fbb7 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/EntityMergerTest.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/EntityMergerTest.java
@@ -30,7 +30,7 @@ public class EntityMergerTest implements Serializable {
 	private String testEntityBasePath;
 	private DataInfo dataInfo;
-	private String dedupId = "00|dedup_id::1";
+	private final String dedupId = "00|dedup_id::1";
 	private Publication pub_top;
 
 	@BeforeEach
diff --git a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/IdGeneratorTest.java b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/IdGeneratorTest.java
index 294d24031..1a279fac7 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/IdGeneratorTest.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/IdGeneratorTest.java
@@ -30,7 +30,7 @@ import scala.Tuple2;
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 public class IdGeneratorTest {
 
-	private static ObjectMapper OBJECT_MAPPER = new ObjectMapper()
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
 	private static List<Identifier<Publication>> bestIds;
diff --git a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkOpenorgsDedupTest.java b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkOpenorgsDedupTest.java
index 8154a87ef..97cfab118 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkOpenorgsDedupTest.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkOpenorgsDedupTest.java
@@ -44,10 +44,10 @@ import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 public class SparkOpenorgsDedupTest implements Serializable {
 
-	private static String dbUrl = "jdbc:h2:mem:openorgs_test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false";
-	private static String dbUser = "sa";
-	private static String dbTable = "tmp_dedup_events";
-
private static String dbPwd = ""; + private static final String dbUrl = "jdbc:h2:mem:openorgs_test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false"; + private static final String dbUser = "sa"; + private static final String dbTable = "tmp_dedup_events"; + private static final String dbPwd = ""; @Mock(serializable = true) ISLookUpService isLookUpService; diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/CrossrefImporter.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/CrossrefImporter.java index cda4983b7..ee6136b58 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/CrossrefImporter.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/CrossrefImporter.java @@ -101,9 +101,8 @@ public class CrossrefImporter { int size = decompresser.inflate(buffer); bos.write(buffer, 0, size); } - byte[] unzippeddata = bos.toByteArray(); decompresser.end(); - return new String(unzippeddata); + return bos.toString(); } catch (Throwable e) { throw new RuntimeException("Wrong record:" + blob, e); } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ActivitiesDecompressor.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ActivitiesDecompressor.java index 420c363ec..feb540fcd 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ActivitiesDecompressor.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ActivitiesDecompressor.java @@ -113,7 +113,7 @@ public class ActivitiesDecompressor { } } else { - Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer.toString()); + Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer); xmlParserErrorFound += 1; } } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLActivitiesData.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLActivitiesData.java index c834efa20..4de4a0266 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLActivitiesData.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLActivitiesData.java @@ -16,13 +16,13 @@ public class ExtractXMLActivitiesData extends OrcidDSManager { private String outputWorksPath; private String activitiesFileNameTarGz; - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { ExtractXMLActivitiesData extractXMLActivitiesData = new ExtractXMLActivitiesData(); extractXMLActivitiesData.loadArgs(args); extractXMLActivitiesData.extractWorks(); } - private void loadArgs(String[] args) throws IOException, Exception { + private void loadArgs(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLSummariesData.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLSummariesData.java index 843889108..5c2a35229 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLSummariesData.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/ExtractXMLSummariesData.java @@ -17,13 +17,13 @@ public class ExtractXMLSummariesData extends OrcidDSManager { private String outputAuthorsPath; private String summariesFileNameTarGz; - public static void 
main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { ExtractXMLSummariesData extractXMLSummariesData = new ExtractXMLSummariesData(); extractXMLSummariesData.loadArgs(args); extractXMLSummariesData.extractAuthors(); } - private void loadArgs(String[] args) throws IOException, Exception { + private void loadArgs(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidAuthorsDOIsDataGen.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidAuthorsDOIsDataGen.java index 2ec4fe59d..3b4033450 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidAuthorsDOIsDataGen.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidAuthorsDOIsDataGen.java @@ -16,7 +16,7 @@ public class OrcidAuthorsDOIsDataGen extends OrcidDSManager { private String activitiesFileNameTarGz; private String outputAuthorsDOIsPath; - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { OrcidAuthorsDOIsDataGen orcidAuthorsDOIsDataGen = new OrcidAuthorsDOIsDataGen(); orcidAuthorsDOIsDataGen.loadArgs(args); orcidAuthorsDOIsDataGen.generateAuthorsDOIsData(); @@ -30,7 +30,7 @@ public class OrcidAuthorsDOIsDataGen extends OrcidDSManager { ActivitiesDecompressor.parseGzActivities(conf, tarGzUri, outputPath); } - private void loadArgs(String[] args) throws IOException, Exception { + private void loadArgs(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidDSManager.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidDSManager.java index bf13db021..73a4bfd05 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidDSManager.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/OrcidDSManager.java @@ -19,7 +19,7 @@ public class OrcidDSManager { private String summariesFileNameTarGz; private String outputAuthorsPath; - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { OrcidDSManager orcidDSManager = new OrcidDSManager(); orcidDSManager.loadArgs(args); orcidDSManager.generateAuthors(); @@ -56,7 +56,7 @@ public class OrcidDSManager { return fs; } - private void loadArgs(String[] args) throws IOException, Exception { + private void loadArgs(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkDownloadOrcidWorks.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkDownloadOrcidWorks.java index 57ca2aa71..59de7ca80 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkDownloadOrcidWorks.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkDownloadOrcidWorks.java @@ -46,7 +46,7 @@ public class SparkDownloadOrcidWorks { public static final DateTimeFormatter ORCID_XML_DATETIMEFORMATTER = DateTimeFormatter .ofPattern(ORCID_XML_DATETIME_FORMAT); - public static void main(String[] args) throws IOException, Exception { + public 
static void main(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils @@ -246,6 +246,6 @@ public class SparkDownloadOrcidWorks { return name.getAsString(); } } - return new String(""); + return ""; } } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenLastModifiedSeq.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenLastModifiedSeq.java index d146f712a..178d07608 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenLastModifiedSeq.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenLastModifiedSeq.java @@ -33,7 +33,7 @@ public class SparkGenLastModifiedSeq { private static String outputPath; private static String lambdaFileName; - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenerateDoiAuthorList.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenerateDoiAuthorList.java index d831f8509..7d9f39d05 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenerateDoiAuthorList.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkGenerateDoiAuthorList.java @@ -36,7 +36,7 @@ import scala.Tuple2; public class SparkGenerateDoiAuthorList { - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { Logger logger = LoggerFactory.getLogger(SparkGenerateDoiAuthorList.class); logger.info("[ SparkGenerateDoiAuthorList STARTED]"); diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidAuthors.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidAuthors.java index 0eb844fe2..51326c610 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidAuthors.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidAuthors.java @@ -38,7 +38,7 @@ public class SparkUpdateOrcidAuthors { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() .setSerializationInclusion(JsonInclude.Include.NON_NULL); - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { Logger logger = LoggerFactory.getLogger(SparkUpdateOrcidAuthors.class); final ArgumentApplicationParser parser = new ArgumentApplicationParser( @@ -204,7 +204,7 @@ public class SparkUpdateOrcidAuthors { a -> a._1().equals(authorSummary.getAuthorData().getOid()) && a._2().equals(authorSummary.getDownloadDate())) .count() == 1; - return (oidFound && tsFound) || (!oidFound); + return !oidFound || tsFound; }); Dataset cleanedDS = spark diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidDatasets.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidDatasets.java index 71c011ebc..fa17e97e3 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidDatasets.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidDatasets.java @@ -38,7 +38,7 @@ public class 
SparkUpdateOrcidDatasets { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() .setSerializationInclusion(JsonInclude.Include.NON_NULL); - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { Logger logger = LoggerFactory.getLogger(SparkUpdateOrcidDatasets.class); final ArgumentApplicationParser parser = new ArgumentApplicationParser( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidWorks.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidWorks.java index 185e5ec46..5ebbc01ed 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidWorks.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SparkUpdateOrcidWorks.java @@ -35,7 +35,7 @@ public class SparkUpdateOrcidWorks { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() .setSerializationInclusion(JsonInclude.Include.NON_NULL); - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { Logger logger = LoggerFactory.getLogger(SparkUpdateOrcidWorks.class); final ArgumentApplicationParser parser = new ArgumentApplicationParser( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SummariesDecompressor.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SummariesDecompressor.java index c16899977..c85b5b691 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SummariesDecompressor.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/SummariesDecompressor.java @@ -124,7 +124,7 @@ public class SummariesDecompressor { } } else { - Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer.toString()); + Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer); xmlParserErrorFound += 1; } } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/util/HDFSUtil.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/util/HDFSUtil.java index 977b55a6f..e1a913476 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/util/HDFSUtil.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/util/HDFSUtil.java @@ -35,7 +35,7 @@ public class HDFSUtil { if (!fileSystem.exists(toReadPath)) { throw new RuntimeException("File not exist: " + path); } - logger.info("Last_update_path " + toReadPath.toString()); + logger.info("Last_update_path " + toReadPath); FSDataInputStream inputStream = new FSDataInputStream(fileSystem.open(toReadPath)); BufferedReader br = new BufferedReader(new InputStreamReader(inputStream)); StringBuffer sb = new StringBuffer(); @@ -60,7 +60,7 @@ public class HDFSUtil { fileSystem.delete(toWritePath, true); } FSDataOutputStream os = fileSystem.create(toWritePath); - BufferedWriter br = new BufferedWriter(new OutputStreamWriter(os, "UTF-8")); + BufferedWriter br = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8)); br.write(text); br.close(); } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParser.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParser.java index c98d63b91..52e076105 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParser.java +++ 
b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParser.java @@ -39,7 +39,7 @@ public class XMLRecordParser { private static final String NS_ERROR = "error"; public static AuthorData VTDParseAuthorData(byte[] bytes) - throws VtdException, EncodingException, EOFException, EntityException, ParseException { + throws VtdException, ParseException { final VTDGen vg = new VTDGen(); vg.setDoc(bytes); vg.parse(true); @@ -134,7 +134,7 @@ public class XMLRecordParser { } public static WorkData VTDParseWorkData(byte[] bytes) - throws VtdException, EncodingException, EOFException, EntityException, ParseException { + throws VtdException, ParseException { final VTDGen vg = new VTDGen(); vg.setDoc(bytes); vg.parse(true); diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/ActivitiesDumpReader.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/ActivitiesDumpReader.java index 04a3389ed..124a1b9ef 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/ActivitiesDumpReader.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/ActivitiesDumpReader.java @@ -123,7 +123,7 @@ public class ActivitiesDumpReader { } } else { - Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer.toString()); + Log.warn("Data not retrievable [" + entry.getName() + "] " + buffer); xmlParserErrorFound += 1; } } diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/GenOrcidAuthorWork.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/GenOrcidAuthorWork.java index d3e9aeaef..4a64124d1 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/GenOrcidAuthorWork.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/GenOrcidAuthorWork.java @@ -22,7 +22,7 @@ public class GenOrcidAuthorWork extends OrcidDSManager { private String activitiesFileNameTarGz; private String outputWorksPath; - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { GenOrcidAuthorWork genOrcidAuthorWork = new GenOrcidAuthorWork(); genOrcidAuthorWork.loadArgs(args); genOrcidAuthorWork.generateAuthorsDOIsData(); @@ -36,7 +36,7 @@ public class GenOrcidAuthorWork extends OrcidDSManager { ActivitiesDumpReader.parseGzActivities(conf, tarGzUri, outputPath); } - private void loadArgs(String[] args) throws IOException, Exception { + private void loadArgs(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils .toString( diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/SparkGenEnrichedOrcidWorks.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/SparkGenEnrichedOrcidWorks.java index 5bcec7224..9f8727d30 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/SparkGenEnrichedOrcidWorks.java +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/SparkGenEnrichedOrcidWorks.java @@ -52,7 +52,7 @@ public class SparkGenEnrichedOrcidWorks { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - public static void main(String[] args) throws IOException, Exception { + public static void main(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils @@ -169,7 +169,7 @@ public class 
SparkGenEnrichedOrcidWorks {
 			oafPublicationRDD
 				.mapToPair(
 					p -> new Tuple2<>(p.getClass().toString(),
-						OBJECT_MAPPER.writeValueAsString(new AtomicAction<>(Publication.class, (Publication) p))))
+						OBJECT_MAPPER.writeValueAsString(new AtomicAction<>(Publication.class, p))))
 				.mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
 				.saveAsNewAPIHadoopFile(
 					workingPath.concat(outputEnrichedWorksPath),
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/json/JsonWriter.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/json/JsonWriter.java
index a89bbc279..23e9dd884 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/json/JsonWriter.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/json/JsonWriter.java
@@ -16,7 +16,7 @@ import eu.dnetlib.doiboost.orcid.model.WorkData;
 public class JsonWriter {
 
 	public static final com.fasterxml.jackson.databind.ObjectMapper OBJECT_MAPPER = new ObjectMapper()
-		.setSerializationInclusion(JsonInclude.Include.NON_NULL);;
+		.setSerializationInclusion(JsonInclude.Include.NON_NULL);
 
 	public static String create(AuthorData authorData) throws JsonProcessingException {
 		return OBJECT_MAPPER.writeValueAsString(authorData);
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/oaf/PublicationToOaf.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/oaf/PublicationToOaf.java
index ff46c3383..a816ca991 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/oaf/PublicationToOaf.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/oaf/PublicationToOaf.java
@@ -88,7 +88,7 @@ public class PublicationToOaf implements Serializable {
 		this.dateOfCollection = null;
 	}
 
-	private static Map<String, Pair<String, String>> datasources = new HashMap<String, Pair<String, String>>() {
+	private static final Map<String, Pair<String, String>> datasources = new HashMap<String, Pair<String, String>>() {
 
 		{
 			put(
@@ -99,7 +99,7 @@ public class PublicationToOaf implements Serializable {
 	};
 
 	// json external id will be mapped to oaf:pid/@classid Map to oaf:pid/@classname
-	private static Map<String, Pair<String, String>> externalIds = new HashMap<String, Pair<String, String>>() {
+	private static final Map<String, Pair<String, String>> externalIds = new HashMap<String, Pair<String, String>>() {
 
 		{
 			put("ark".toLowerCase(), new Pair<>("ark", "ark"));
@@ -529,9 +529,7 @@ public class PublicationToOaf implements Serializable {
 			if (jsonArray.isJsonNull()) {
 				return false;
 			}
-			if (jsonArray.get(0).isJsonNull()) {
-				return false;
-			}
+			return !jsonArray.get(0).isJsonNull();
 		}
 		return true;
 	}
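A remark on the double-brace initialization retained above (`new HashMap<...>() { { put(...); } }`): each occurrence defines an extra anonymous HashMap subclass. A static initializer block fills the same `static final` map without that cost; the sketch below uses hypothetical entries and is shown only for comparison, it is not a change this patch makes.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class DatasourceRegistry {

	private static final Map<String, String> DATASOURCES;
	static {
		final Map<String, String> m = new HashMap<>();
		m.put("orcid", "ORCID"); // hypothetical entry
		DATASOURCES = Collections.unmodifiableMap(m);
	}

	public static String resolve(final String key) {
		return DATASOURCES.get(key.toLowerCase());
	}
}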
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/similarity/AuthorMatcher.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/similarity/AuthorMatcher.java
index e36ed3bbf..fff753ff3 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/similarity/AuthorMatcher.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/similarity/AuthorMatcher.java
@@ -142,7 +142,7 @@ public class AuthorMatcher {
 
 	public static String normalize(final String s) {
 		if (s == null) {
-			return new String("");
+			return "";
 		}
 		return nfd(s)
 			.toLowerCase()
@@ -189,16 +189,13 @@ public class AuthorMatcher {
 	}
 
 	private static void updateRanks(List<Contributor> contributors) {
-		boolean seqFound = false;
-		if (contributors
+		boolean seqFound = contributors
 			.stream()
 			.filter(
 				c -> c.getRole() != null && c.getSequence() != null
 					&& c.getRole().equals("author")
 					&& (c.getSequence().equals("first")
 						|| c.getSequence().equals("additional")))
-			.count() > 0) {
-			seqFound = true;
-		}
+			.count() > 0;
 		if (!seqFound) {
 			List<Integer> seqIds = Arrays.asList(0);
 			contributors.forEach(c -> {
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/DumpToActionsUtility.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/DumpToActionsUtility.java
index 8096c4e8e..c5a7a281b 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/DumpToActionsUtility.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/DumpToActionsUtility.java
@@ -20,7 +20,7 @@ public class DumpToActionsUtility {
 
 	public static String getStringValue(final JsonObject root, final String key) {
 		if (root.has(key) && !root.get(key).isJsonNull())
 			return root.get(key).getAsString();
-		return new String("");
+		return "";
 	}
 
 	public static List<String> getArrayValues(final JsonObject root, final String key) {
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/Pair.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/Pair.java
index 8883d00f5..1c2b13a52 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/Pair.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/util/Pair.java
@@ -3,9 +3,9 @@ package eu.dnetlib.doiboost.orcidnodoi.util;
 
 public class Pair<K, V> {
 
-	private K k;
+	private final K k;
 
-	private V v;
+	private final V v;
 
 	public Pair(K k, V v) {
 		this.k = k;
diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/xml/XMLRecordParserNoDoi.java b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/xml/XMLRecordParserNoDoi.java
index 15cd4f268..29791bbbd 100644
--- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/xml/XMLRecordParserNoDoi.java
+++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/orcidnodoi/xml/XMLRecordParserNoDoi.java
@@ -43,7 +43,7 @@ public class XMLRecordParserNoDoi {
 	private static final String NS_ERROR = "error";
 
 	public static WorkDetail VTDParseWorkData(byte[] bytes)
-		throws VtdException, EncodingException, EOFException, EntityException, ParseException, XPathParseException,
+		throws VtdException, ParseException, XPathParseException,
 		NavException, XPathEvalException {
 		final VTDGen vg = new VTDGen();
 		vg.setDoc(bytes);
diff --git a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/ElasticSearchTest.java b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/ElasticSearchTest.java
index 69a2547fd..b2a5ccc8f 100644
--- a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/ElasticSearchTest.java
+++ b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/ElasticSearchTest.java
@@ -30,12 +30,12 @@ public class ElasticSearchTest {
 	private String indexName;
 	private String indexType;
 	private String record;
-	private int readTimeout = 30000;
+	private final int readTimeout = 30000;
 
-	private int nThreads = 4;
-	private int nTasks = 150;
-	private ExecutorService executorService = Executors.newFixedThreadPool(nThreads);
-	private List<Future<String>> resList = Lists.newArrayList();
+	private final int nThreads = 4;
+	private final int nTasks = 150;
+	private final ExecutorService executorService = Executors.newFixedThreadPool(nThreads);
+	private final List<Future<String>> resList = Lists.newArrayList();
 
 	public void setup() {
 		indexHost = "ip-90-147-167-25.ct1.garrservices.it";
@@ -67,12 +67,12 @@ public class
ElasticSearchTest { + " \"blob\": \"H4sIAAAAAAAAAO19zXLcOLbm/j4Fw4spdYSRAkiQBDQuV9iyyla3y65rqW7/bCZAEpR4nUnmJTPtUq36HWZ1I2Z2s+l36F29ST/JAGQyCclCiqJIZjKT3SXbygRxDvFzzvcdHAAvfvh1NjW+8DSLkvj7Z2gCnxk89pMgiq++f/bL5Y+APDOyBYsDNk1i/v2zG549++Hlv71IuZ+kwUnxlzFni+vvnx1D8T8gfkxgmwgD7NK/PDOEgDg7ieIFT2M2/f7Z9WIxPzk+/vr16yRJ/SgQf14dx9lxWaJ8ggdLny1ytXSPrIuUzwRRtohif/NTSqH1c3wu3oQteLDhsXWZ8qlkcc1TELMZ1z9VlSmfmvGZJ9r7Oprrn6rKrFsjTZN0Q0vIr8uyfjKbbWqA4vuydNGF+tLF92XpuVArEb0EAr5g0TTTP3e3ZFnDZ37zdaPAVYH1m8/E0xveXH69LvtrMYZAFPB4EYUR39Rm3xYu6wmXcTEDdM+uCqxbJeUhT8XE4ZsapCpUPseCIOXZhmdWBapZ9CUSAxDMkyzaPMjvlixrEC37Wf+U/LYseS2mSZLe6AuvClT9NJ8mNzPRlJs6qyxTPvVfSzYVTf/QTL9VrHw24+mXyN8w+VYFbo/dh0bsum+EhRBtGG3q0qpMNZsyzlJfTvplumHY3i5XaSh+T/mXiH/dpOa6UPmct5xu6FX57V39RAVZskw3td43RZ+9/DdD/O9FYT9O8sLK5Cm+VUss0+ilrD0T1VdV3+MlXhwrT3xTjfQuLzc9lRf45rHrJFu8XItdF84/Ll7keOObvFAmrDrDFUlqiWnisyl/yeMXx/d8vBK4scoXqxlVTj1FUPmNn/J8CoAZX1wnwcs3kTDPixfHuu+/rSFberMok+4eBMKZvTQhcgG0ATIvITnB9olNJo5j/q2q8+4T3zT0lGULMBOYQbRdUNZqQlkrdC8hPBH/mXCCkPO3dZPf88w9bztl0YwHLxfpkivvuPr02/ICxxT15U7hzmN3vtzw9DyNZiy92VzL7UKr7r23A18UdmVlfjZgpdIA1WxhRAESVdBL0zzB5AS6E4pRzRZeqSRxifElyiIvmkYLYevnS28a+c9WSt6H56pKVAXzcaeophlS95W+t777X7isFYkhZZ6YeGLDmkNKeekKkZxcRV94nIOz7OVPLI2Y8SnJ2ItvoMutgg/UGbJZNL3Jy758FS+Sq5j99tt9daoFH6hTtFcQLYqilZ7GxurVZ6p+P1Y6Xvm4QqkKqM02DVWl2LNjpabbfu2OO9xY452imoFWcyY42HzEwNiotDFfLoBgRMJTIoIdE5nPNkyZOq92vK5HUJH5lN2AKA74r2K+3XnpTTNs8yvrZ1jXjanWX8CGb7//tgzwp5HwvsIH319afeJ+VFFUcPzq55+B/Zd3/355Zlo//+J+fHVxSe9HF7rqczCxsZrbcENXz4Pw45sHjx/VJHcaMJ/RfxKc5LvMOE2mU37FjfdJHCTx3Xq/tTj3SL+nwJ1ZIn42CDXmaRJGUy6mxZVwn7pn60hZd/ZnfzpfpnzJFhPxzwnzJ8vPx/MkXbDpMY9Xpk2QZ2khQSosJGBrC3kUEjPwCA/FkBXQBHs+BAyGCPguDngQWA7x0R8m14vZ9D5t7xiMuyVu/6ra2832UCmZ44iC7G40kwXfVU3uiiOWZHKz3S4JpVrBinCXzHzj82WZWxXcw6Tvo+Kb3+ue8k/zAbXR0GPeQfEG1LRsCh/tDO570eOyric4BAIgAojI97cF6iITZJGOHMKjWlatf3QI++QQVt8rAxosbub85YWfzJeZ8WopKGhqnL9Z1/RNyVp1fmHTJX9p26ZJHYRdCO+tryhVq8LSp6yCHlmu7kQUPGZC5Sg4LjD0RJT7geUvcR58/5D8b5yETnrKpzlFl9HdlxmfhvdWd6vUHedT01SpbqiuhV7HKVTKuvqwCnYpsTGQLWeSB2/0OVWUrHHwwLZJTWqrqKlE+jf6HrXcbdda1bVea9jsxdaltPWsQ6APuPl1scd6QVOY69wL0BNkTdxHMSG1j0OB36IimnSVJst5A7exVoWc2HBiEruZ21BGanZ8T7GqsZTmrUbm2mM7pg2Ri+5xs/W64nhdwT0+f7NRvsdbk8qn2nRiQ6uut+6j6VUZm7z2t+XqeG71qY68tyriqR5crevRXjx/+HGe/J5Gfao3v0eLzZ0uXo7F0W/5/H9Yyxrq6fVSK1rxk3p94Yv59/LD8atq3sgP6j2aLONFevPy7evq4dVHD/fig0qWUoSdYTMvulrms7lek9Z7XvXz2EHOWrd6jzxWunx8NWo+nX94+/bj+zf3SVSKPdyKj20cZblmU9EXxxvdwV0wVd/jdeYcH++n1frbco42dFxq282d47qCJztHN28cLBvHJOK/CYW0G+fYrOlVGaNzNEbnuFnL0TneK2V0jhtacUjO8VY5hbjeT4BXqVMb2W9ZRkei72Y3bazsm8K6Wqusu431KcV0NSkpOhurUsvp6rqV+rSxttsldfV9k9ZTa1W2Kq2rd5VstbG2soyuDpl/trGCvMBjAiJPiCTdVa4JznuieLV+Fedt9k5KyYet8Dcx2ZWnlC0tPntE7HZTzUV01ifcJYFrA4c4JsCMQkA5gQBDhBw/9ELPp/cK1AR3N0mMk3QmpsNvPDAWKYuzKE9MlMk8z5rrUVX6OGWeGvNd133PY12OhSCJWh0ACE4QRtYxtQjtpacfFjiMLr2v1D1W4IWctbkVvS/6SRDBmGzmc/LZ46rkfWubNdYo1Ua7Q+6kRUTAyumX5Z5YeOLQ2uuUar1d2VtVxkjulLr2ntzlk2cRLaY1erwo9pqL1wmfG5/E31dRssyM8hMWB8aPLFpcr0VvqPjF8UOi64MATel6Xdw+GNDV3i0g0EndGijQKdSWF8nrrw0ONNo0HCPNQIKuypaAgq767sGCTvJ2uromcFCVfkzKXQ4JCoWPvdz0yTcobCFYfSBMIQilKTyqM8XKHDxFmU3WWo6x/xRGSr4ZSxeRP+WlNdUOv7VzrLTfgAXUJ24EF85DyGv98k8efE78yC0sZP1Y8cHDITJ28xKiKijENAG4ypvXeqWi7cpmK9zOJ0FzswUT6M74MZomWRJGbNWStwveg02/QZ0b4kgPLKy0QLIRmRC3hUyDgZJsFmJKUWCKNvEZwBxx4LkhBizgvsUwZJ6D+qBejfUYBiNrbyx0QbIhtY6TX8MkDa69Y+oSYbKoi7Fr2hOIrAl2+hgBzRUZxhBoh5Q70CVmLVJeluyGlJvSgkpSbp1YdGJTuytS3sQ+qzJGUq7UNZJyRbmi2JkvQGAWJdPk6ua5ceYvZzyOslnByS+TKU/z2T8S8y2ACJ3UrQEJnUIjMe8ZXOjEbg9g6DTah6ERZV7c+ti41fp9jYGHhbbW13OWLkCyd3EbrvhLwEt3mYdtFmtveVTHCjeI3XhJ8hn412wu3rKHwE0VgdEHbloIplxec+NjbpSMd6Id5UsaSWi855EXR78NNKx
iTxDCux1W2QaVxrtCpe9XZBhUur0h0GFkjYQEObYVCGZOCMA+poBRV9Ann9jc9JBneo/am9h0MDTWYxhjob2wCqJ1wyqyZD9hlfZ2ebVgn1UZY1hFqWsMqyjKFcXybIY8hvKJs2yMn2hq79ZV6KRuzV3oFNoHkryt+EkjRKkTuz1UqdNoH4bGGD+pJ/kQ4id5bkseLklzx3hUx+COoZJ9DpVYE9dpYTvvvoVKGhm1Lpxac0O3fXrc3hDoMFTiYQpDy+bC8PkuwA7zAfMCKP5g3KLQtIQp7GMwNNZjGGOhxVAJqR0qIX2FSmBnoZIG9lmVMYZKlLrGUImiXFHs5+s8g3Z+LSDR1BAIJ19by2Mnp9epPGyMxcabxF+kUcyzMZCyBUeik7o1Z6JTaB/Y8rYCKb2NjG4xp06jfRga3QdS7n21MZKyi5GUueo3wWLlN/PQil/6TRCUfvOojj0e4yx7HGeB9sQ03d2OszQ0cB1Gibdj17ZPltvr0w4DJ3YYEu4jBGzumQBbPgMUcRvIDZLYtSkitJe9O431GMZYaCNwYkOLEGLVyDGpSrYfOEFE9AmA+bn/lnliOhMLd7Z1p4nBVWWMgROlrjFwoihXD6SMgZLeHYdO6tach06hkQ13BRh11e8IGd7HszQE9wVFMAlcrwyh4PtgWhjCozoTrAHzlTcGCj8upe0P8b3jT7JhUl3kTqhJd5vqdkiLsBtyxjkBbugTMeCxAxhiDgg8aBNomQ5Ffh+0qLEeh0OLRCM4FqHmw7SoKtkBLbpzV6w1cezuUu8bTE5VxkiLlLpGWqQot6ZFSVqsH/+csl+jzIjikhT96+//nZVLzKvl5mR5db0YydIW3IlO6tZcik6hkSxputYCNpI4Gjs2sPoaApaQmYu8X+K4bPggWRLmMV8mnOfmUWCGkipl5TLiakkxt41Hdebc7q8cuv0QqJWjMaKZ8T7yrxfcCHi6djk8NobJqCCdELPhPbZ7wKjcIKDIEraOhQKZY8hdQLBcSzdxACFxfOw1sn+PtX2N9RgGo2pvLHS0kExcRBxs9reQvEngMPq0RZZs1WbJ1uBZchODq8oYWbJS18iSFeVu4ZSR+G7B6+ukbs3z6xQaiW9XaEBXffeIQCd571cJy+XAOjOpIZ3tgcZWqQCPOky/8poz/WH6j6C/u8ZPkT3BpnWw/NR3oMUsh4DAtMSY9m0XMM/3ADUDC5nchtz2+iAtjfU4HC6DTSwaiNRIhKxKdpIICa0yEZKeYDShLurwsK1HT05VxshllLpGLqMoVxRDLnKMf/39f99a5ZODzJA+cqQ4W3AcOqlbcx46hfYe9srJAaoFntz2ymlxVKe5d39Zpw4ebm9ZR1gWY8oXvy14JoyNx+PsP9l1OswlHUQnFMGDhcxW4KPADCEgLKQAMxICBj0b+Jy7DAUoYAHvAzI31uOAIDPC1HbrJMlVJbuBzGYOmS05fTCeUKu7a38aTE5VxgiZlbpGyKwoVxR7mwTPjVM5ZAWOyIrbfj7wREyPRSIvNFBy5kb8vAUvopO6NU+iU2hcItAvEViYuMjGFuxzI9EDQsf0uM106SoJgF/axTxLLlbMopIrd1RnujXgT34ShzzlsTBaczbfJw714+//TI1lnPHUeDv9/Z/+ZyORuXEBy8rfRXOLVx8qnyITbDu7zad4yyTKBJk5ETPAFugYOsQ03T4oU02pwyBI7XVuh2SZUmSGtu8AAnkobyBngHHXAq5j84C7GDu8n6S4pnoc2ljo5MBi6Mqj2oAFLESBRYgFAf5fvfT7Y8UPo7tbTI3EtVMj8eBTI5s4WlXGGBtR6hpjI4pydw6kLTYRXviRRONjROSh2ruFCjqpW4MLOoX2ISLSjCfoqmyTK+hk9MQXdOL3odM7O3i4PdyoE9Y3dtTpsQ/DoLNoqGj6vOV7u7HrYaGDjIZqFNrN7jYp6fNM6Urq/vR3j9Hv9ZnSxQbxrIC+atS7DqQas4bKY6S5QNrxgseSs59lGbvJjCQ2FtfcUEjG/2Cz+f+UP2uqkYTG28mfJyXfGO6ecZvu+B3oXd7q5EBqQlE5h1Aem0AhYIE8RcElPHA9j/rwfvvR9q1OTfUYRhCtvbHQ1eIIpg41TYSsXs6iril1GJ3bRoTUcS3XwqRGhLQq2c2VXQggO99w4Z5Ac+LCLjePP9ryqjLGCKlS1xghVZSrTp5+J6xiKlARv0pSPhOz3TfO4zCKo0WVZDTGR/uDBTqpW4MGOoX2IUbSZXz0aXBBJ6MnyKATv/fbbOQZ1NffmEQBFgqTeFRnqjXgzSUlZKmQNuU9UOcqrfAxG9BRFWudtbIBvWqHOxt1zs5ffzj/m/Hp7D/Oz/48TOKMrIlpkd0mzh0kmGDLxDJKLw9ncB0To/tjeO2nltQUPAzK1F4Xd7n5igXIczwTWCGyAQ7l3kMm/oAB90Lf8jyb9pJJ2FiPQxsLHcZGIKYU475jI5ukDqNzW8wec2pnjzl9ZY/hDq8zf7RzVWWMsRGlrjE2oih3a6e40Kw4aVku5mRLL1swubIjE8quo8yY8QWbX99kkZ/JAnJFKIp9JqaxNA1jAGUL2EEndWv4QafQGEDpElPoZPSEK3Ti96HTO0owa4c36sT0xx11GuxD13eVZSSa3XUwMc0+T+F8QOggk4w0Cu1adyOLuBSJf/XZ3ZuFDrK7e4yM37pbJL2RNzOu8XCeZSbwMFDwsCwgo+kKHj6qg7N2P+usWegc1gqdq88F7OYlQuun5K+dhdsT/3PBaspctSQ2LsruHWj4HU9s/Mh7u/sOv3cXjzMxQaJFet3I/YDUYcTj2uvcTjbvEngMBT0hLiETYYvwhLpYtHhPSyyPk39oHd7hSktgQWIG1AfQCgOAWWgCYtsOsDnktmk5iONG+zEeOwYa6zGMsdBioqJdO1HR7itRsbtj7hq4WlXGGIxX6hqD8YpyVaLiax7zMFoYi0TddLEKul8slkH+y/kiM95F2UIwlDH6vgV/opO6NZ+iU2gfonHdRt+fwiB0MnpiETrx+9DpnW3vbpFZ6KT1zi50ihxEHqtXeEywSICyIXQVlsukx5S/RItMhu6kxzyqY4n3Jru1hbDZ6zRaRNm18cfiUyNM0hyOrBCIBCQKVhlmGA06EwfTgw2j2eL/gqX1HEbbJHUYTLq9zu0gjGYTFx17zGfBcXEbn2s6NhYjHUJL/MBeshgb63FoA6DDsBqyqR8izwLYC6BweL4NPIwpgL5tu45rIdPtJ7TaVI9hjIUWc1xJ7RxX0leOa2dhtSauV5UxhtWUusawmqLc3RzXN4m/SKM4P7LkMpnydMxf1dferevQSd2a+9AptA/BlI4jaE8gDzoZPREInfh96PSOImidkAqd1K0RC51C+zAwOrxjHrkOxHZvsfSHhQ4y3VGj0O5192q69dvdm4QOsru3kd0arJBwHidfI+GjOghr9zNXq2XUR2WuVjmos/qZq1aFOLvMXP0gr3hiU+M9+5rnr1bsRZ70LoPxZyyd3hhn8TS6ul7weCbmzUAj8XjiOgcbiYfCuDkU9nKYRE2pwwi+td
e5HQZimcl46GICfGz6AFvyPFrPNIEXWDRkNkOm00sqc2M9hjEWWgvEUljjGt+qZD+BWKezQGwDy6vKGAOxSl1jIFZRrih2PjV8gazSyFsuEiPg02liyASNKDF4/is3eBD9JgxAJKaNseDZIpLfJP5SwplIQB3xvfibTafMkLkHUXKVsjBiRpinH4QCXBnAiIQcliXGdLwAZ0Pt3XokndSteSWdQvsQ0ek2vvsUSKqT0RMs1Ynf+8S4aAoqUwtyUwsKUwt4/isHpakFQQRyUyu/KU0tWJlaIE0tUE0tqEwtAFKMsLTr+xbqzOBBZNbVCSG0QOs/Cd6cLZj0dj+umpXJm+3BhZ9I9CX92TBZPHImLkW7zeK7PDIwcJ3Q8RyAPUYBxqYFCEeiibDrYR+KAeb1QvEb6zEMotfeWGg7/e7Nx/OTPNebWmLizdMkPBHTe7VMRR0HQsvsOw/vCcoMYzi0wftXuxVrHDJYlexnX6Pb2b7GBrZalTHyfqWukfcryhXFflofl8KmxtkXQc8/cTllxAgeufkWQIRO6taAhE6hfeDmXaThdA4udOK3CzB0Wu3DMOkqSwNAgCgougD0eexc1fFjpkaDAM5M8ZlAeMupeJmVzzyqY4v3I0ujjcwJ/lW5zHKVC/6vv/93Zlxec4Fx/ZthBligLQ3qbgdYukuTcAmGyDZ7TpPYKHUYFLm9zu0weuYzEzkcuoAiKwSYmj6gpuUCAm3fFa6FYMfvo+cb6zGMsdBimgSsnSYBh79frYHlVWWM4RKlrjFcoihXFPs5jfJJNmMLMS2fG3Pxu4ANX7iYYlmW/518lV/IRNB8aAtQOb0xprLYOif0/Zj8sKH2bv2MTurWfI1OoX1gzt0mPzwFaOpk9AQ2deL3PvlhXhhQUBhQsLafYGU/QW4/89O8K/MJKvMpQMg6o6HOtBwzGireLI8ojNjK+ywiFrNh0msEJ9B+5KXke5S/4Fph4Pt2CJBrIYA9SAAhNACUB4zjwDO9oJdV68Z6HBgDI5TWPjGEDp6BNZmcqoyRgSl1jQxMUW6VlsdZlsTPjZR/WU30gmyx9CpPj8zWR+C94WIOjRxrC55EJ3Vr3kSn0D5wrM72kGNkuy4m1O5rCDwsdFyZ3Myu0tw4gso25kRqbRqBMI350auBNI1HdabcuFZZpoMnS/FXIDzhaTKbs1iG8haJcZZv6RaNe22cij+XAv5V560OlF7RCbIOl175mAeB6dmAUuIAHMhzFhyCADQD5NoOIybr5SzWxnocDr3CJrYcQmsscFUlO6BXBEALQJLnA9MTjCbUtTqjVw0mpypjpFdKXSO9UpQrip2J4RRNjXmSiWkylVt/DT9NopQbmbBPhig7T3kcpPyHkVltwYnopG7NkegU2vuFDLkrcwrKeSIQNSjmCZDzBFTz5KhO8+8+yrb6Qdmnog0FMYuloZlxmRX4hS8XPwwTSEM8ge6OpwF2CKSxb9PADEIByEICcEgY8GyXA8dEPseexwK/l3WKxnocDpBe5X/h2plieB8O1Hn05FRljEBaqWsE0opyt042z7PZA+Vs8+pExxNjfSeS3EnwXC5aJNPkKt+KJ5c05im7momivlHE9bIRcm/B3eikbs3l6BQaFzM2LGasz6a999XGE3F3kWI9cCIuuGU+gWI981WPtfUEK+t5VGdWjkzs8E61RXBC6eEueIRBiCEReN91uID6DkeAIscTTRRyhzNCQr/R5tTHGszGehwOTyuWMUiNG6iqkr0seDjdLXg0mJyqjJGnKXWNPE1R7rafkz7uYjmXM7P44KebTMxT4WJG1rUF56GTujUHolNo7xc6VvMhx9SZMkHArJwgR3UafgTWKzj8lmeimQVgNgKeySTVbLbMJKY+W6bJfKhbMtyJC8luQ+gObvHND9VJfg2TNLj2yiN1bGxTF03EgJpA9R7wDqHzU1QZBnpubxh0yKSga7shRCagzJKHv1g2YH4YAt8PEXNhQMIA9TEcGusxjLHQ4oqXVXvFyxr8ilcTG63KGJmUUtfIpBTlVhnS+eQW5iObjZRpC15CJ3VrnkKn0D4sVHV0g2tXgFIneJugUqfTPgyPDtcxV+3f7zLmJqHjMuYDm7Iqv3hUx96OkZJV1OPymhsfc+NjvGNxIF8yz6ARH7+L5L0nN/LXs8V15GfDDJtAPLGtHQ+bdMiXOSEBdW0CsOVyORsY8JDLACecOBiRkLFe+HJjPYbBl9sbC52E0JB7fAGh5VALOghJCk5pH71eCs9qCT+0rv6aZMu2w2OioSF1TWLLdoZ99HEdkcPo2RaDXnbtoJc9+KBXEw+ryhiDXkpdY9BLUW51IKiag1gkyilZ3NXh5hK0XqZRPB5Jo6u9W0iok7o1WKhTaB+iH42xg67W1vCDTkAfGEInex96vLNwaFvkQCeqX4Kg02Lvk4hup+rLVCI1XZ/nbhIkcX440aJwk0d1zG/TWJm8sIV/7SFUViWc6UNl6nPiR6AxVPmV4oOuQmwXpx8vL88v3hl//PjLpw+v3hsffzQu3519fP/x7V8HGlJzJwQe7uW2HsWMmqYHiON7gtA5BBDuiibybSd0A2iZ8H6BLRPvxnoMg423NxYarhRtXCUCUDS7TV1gIxs02/HWZJkolylEjrvp8+BJjWtqq5L9hFk6u6a2idVVZYxhFqWuMcyiKFcUe7e8Soy3abKIltlzGWIxLtPl4rpcDjy9TqNMnmhvfOLT6CpSVB6DLP2BBJ3UrQEFnUL7QLk7vJX0qeBBJ6N7AKGTvPcE+1oYSHBVGMgVixb2Md8wL37xS/soXzG3j0d15l0Dds0DeQ0qkCR7R9h1Cyz57l74W3vejdOpvLpmsCkozgRZh3tIme2YNrSIBagnz8AmkAHKbAZCx+GhQ0PbJr2c9ttYj4OjVW5tWuUOn1Y1mJyqjJFWKXWNtEpRrsqwPE3iUNgQMUUkm/qRRYJWfZV/FDetlKe9/Ovv/+dNlPlCfmb8nP7+D3klW8yilP/r7/+3pGHVrd0j+erd4+ikbs3r6BQayZeefFmC/UBKEe2VeFVSxwT/BtwrZ1hrKyo5VyitKJBWdHX4mLy2Mr94ZWVCwVwSsZUFLVnaYmU+j+pMy93fJYD6IWfvuS9vCc0MvhC+SrzaXHin4rLQLD9iQSZhRfKfwkf9/g/Zwr//Iz/Y/u3E+POkuox5kOyNTCCiu83eml0CrDeTq2t4XUpt6riu1ewC4MeayZpSh0HH2uvcDqm5YweOQyEHISI2wCG0AHGQA1wv9E3Mqc1xoxvAH9vzjfUYxlhoj5ojVJeay5L9UHPSGTVvYHlVGSM1V+oaqbmiXFHsfGqkbC4RZ2KEPOAgZTJuz42YT405lyYqTUrwErE4GUn3FnyJTurW/IlOoX0g3c2ApK7KNsGkTkZPgFInfu9XPaMpKI0kkEZyZSOBsJGgtJFgWtnIozpzrwGzLlkjSxeRP+U9kOtqG0K3t54KrpotmABmxo95BncYMeMDT8CFn0jEI2odJnFG5oRguNvEuZstuCYRGNwl4l/K1bkd0qc6IofBlNrr2S6vr
3UC4iFKQRB6qNg6QbjlAZNAEtgO9zzey5kKjfUYxlhokTXXuL62Kjlw1tzE7KoyRtas1DWyZkW5W7dunRivk+DmuXGx9AR6iX3+3PgpiVllaUeO3J/n0EndmvfQKbQPHLm7rddPxY06AX1gR53svSfIK+4LPGEOBQJZWUPhcYUxPKozyXZ+g21PRPjDMvZlavUfi0+Nj2G+C6U8h078euGLkevzgdJhZ4JNc7fpcJekift+GBIIaGC7AENsA0a4Cwj2rRCZhNGeSFNTPQ6NNEFaOwuYDp80NZicqoyRNCl1jaRJUe7bq4p/4gs2v76R+1nWWb3jsVUbau/Wgeikbs2J6BTaB+7U2Y5KaiOMEMSot6vQhNQHhI5JvTUvH55VFnGdqLs6oajOTNv9PN2eGNTqXMSbYgdlfnfwTwLRpbGxPkPxxjhCtg0Bcm34h2ESKWhPCHV3m0h1tK7oOpaLrF7XFR8SOQxa1F7PdkiRA+LCgDECOA5CgE2CgOdaDDimFWIzsE2b9nLVXWM9Dm0sdHJWO4GSb1uWS8U/TRtSy+ol+/5xwofR1S1GQ2pcCF6VHPqe6AYeVpUxRkOUusZoiKLc6kTvNFkkV4yzMeCxBTigk7o1SKBTaB8CHh0uFj+RDOgE9EEIdLL3occ7O6e7LWCoE9UvONRpsff5AvPS+R3VMapjckDh+F99+PDq/YU8O/vi9Pzsw+nZMGNXCE3MA76NjjkODSgJAbJtuTfEcQD1/BBQi5mcWgHFpJc7qxrrcWi0F+HamdO4L9pLO0sCaDA5VRkj7VXqGmmvolxR7HWUiekoBtM0yFdootmMy/l+YiRpdBXFeToAMz7zG8NPYp/PF+XJYDxfy5kVazlB4svVMS5LL+PoC08zNjWuWSokjSkE23A/Oqlbc0E6hfaBX3WWQmBhgi3HMXFvdFqeC7ZZ6JhCsJlMeZVJza88Kk0qWFlUmU7AgLCoYGVRy1PCcosKCosKSosqS68tKlhZ1KM6M3XnUxAg7YenXcx//3+LlMcsyrI8D766vZFLb/aGLxeZfz2V/g/ZLgTIseGJcRYvvv7+zzTkxt++RuJ7HhvvljMWR9lsmRm/iMJ/SuKQZ9nqmveIp8v46rnx8fPn5XQhSl+mLIikvuLfsviFf72crvNKPg+ULcKJSezdZotdXK4DBcMg0EEU9naxzlrigGlfe53aYQiAeJSHrs8AC7mwpr4bAI+GASA2YQ4lNnRpL6eBN9ZjGGOhxRCAVTsEYA0/BNDA4qoyxhCAUtcYAlCUu7N5+lVsnMcLLrTzF0tB319HyVUqwcpI4LfgPHRSt+ZAdAqNBL4z1Kirv3vkqJO890uh5dZpJg/vrowh8EpjeFRnkjXk3sPj3DtHT+kE24d7rxHlLnMC6ICAegRgaHIxLCVUDUPMTWw6HuklXbuxHsNgMu2NhY42ZTiCj1BHJkT10t11RA6jZ1vkqGZtjmoOf696A7Oryhg5qlLXyFEV5aobq1Y8FbzhmfE6yTIu1UxTns2FksUZLCNN7R0Z6KRuDR3oFNoHmtph5vYTEYNOQB+oQSd777mqXDRe8dWAZ8DLjaIAIapRPKoz2XY9o7evleLXqSCi2bVRHvcVJmme61Se95WE6q71gS7g2hPHxQfLkOVRd7ZtMtEgHAHsUHluA8bAIuJ3z6IWdFgflKmxHsPgUe2NhU4W8y3hcCB0sdvbYv5a4oA7tUVybNcmx/bwF3AbWFxVxkiOlbpGcqwod2sB10hi9eC2PJ37PPaZmKhy8p+UNzvLzz/xLyubUKZzX4hP4gUXP9fGqfhzmY7LvtvAETqpW8MSOoX2gU93tuz7NHyhq797jKGTvPdUulz2TWL1qLcif7uyoeW1zvLjdG1Cy/ztrDKhwC9M6FGdqbm7i8XVVur9XCyGE9va8dugOrpG2bEIpiZCxL7/8vdurlF+SOowaFB7ndthnMN1bYsGHgWe4whWxWxb/MsOge0ExPZd4lCrl55vrMcwxkKLlNipTYmd4VPiBpZXlTFSYqWukRIryhXFTsUv4nUyyYlfBWyWjVx2C75CJ3Vr/kKn0D5w2Q6vSX4iWNTJ6Akw6sTvQ6d3EsCAiLqmg03Y3xlumyWOO843By/8lbeT0Qsmvd1RHSu6+/vDqzhWt7dd5w/JLdyz58bPYhQkco93Hid/mwQDXeWHE9c53Dx427cR9VwIXJuL8W9yF1BumgAx1xM/RPz0crNXYz0Ojv3WPssa9XWWtQM7ZL+PnpyqjJH9KnWN7FdRbnUwZXq1LOhvmcB29qs8lCb28wO6hFM7ya+xPE3iRRSLgmxqnC3TZM6ZPA3FEwNw5Mtb8C46qVvzMDqF9n45kJUzCIgZVBzOVM4geSDTVRLkH/rVBAJ8NYFAkE+gozods/sQ3OkHgufWiM28NAqubqXbnkVX17ezUpQM3GFCc+hOEDrcBFxGPOzQEIMQ4wBg4lJAAoJkeroVBlBMDJ/2Ac0b63Fw0NytDc3dwUPzJpNTlTFCc6WuEZoryq1iTfwLZ1MuMzCn0VWeljki8SecntuRM9FJ3ZpD0Sm090g8XU0Y+Q75hHkQeNfphxF4HyLwJhMbHi7wdnyHWi5lgPk0BNgjUMwLRoDDGDM9RAI77GXnW2M9hgG82xsLHZ0NY5o2Nm35r17uNakjchg92yKlorUpFR0+pWpgdlUZI6VS6hoplaLcarXDEL/OWSw3sy2S2/eSzNeQZTLyqC1gA53UreEDnUL7kAzW4ekwT8QMOgF94Aad7L1nzgys7SJYJLdvF6ns4lGd6bbz58NUeYOdsuWf352//3jxUfx1+uq98e+/vPp0efbp/V+HSYeRObGps9t0uNnFtXpblt8Zi5zjCwgtCiyHoCNo/UEaGAR6WXUqFchqKzAMbtRel3e04VG0NzEtSrHZRzfXE3poXdthcMtCnFAmKmfEowCHpg2YFVLgcEwowQJJkEYXUz+24xvrcWhjoZvgFiKWaVIkgyq93NhTR+Qwera94JYJ6wa3ZMl+gluos1TeBiBKlTEGt5S6xuCWotzts53kZhSZyDsXbQFOk7mYooLCxOVlu3xi/Cm5SX//hyEpVxaJAT7Gu7YAF3RStwYZdAqN8a4uYYROQB9QQid7H3q8wz3OT+OHOhH9cESd9H3o8mYRIF2VHUWBdOL6jwTpNNn7aHd1b1exNUMCJX+Nk8ortDn4LGASB2uUdFTH+zYIf5exYJYuIn/KewiBVx5DHwJXnxM/ApcreWbFB12Fzi8uf3lzfnZhnH8w3p1fXH789Ffj1Yc3xjqi/lfj44/Gxen52YfTs2HG06E7sfGOnybXYQQOuj73LGQDlxEXYCaaxuMhB27IKMOMQT/oZct1Yz2GEadpbyx0FYGDxLIkhkW9nC9XR+Sh9WwXq2aWbR1fz68nwtMI1AppL3O5vuBhdHGLQVZUO8iKBh9kbeJZVRljkFWpawyyKsrdCrIKzX7iC3nHbxb52YlxvsiMj2l0FcVZHn99I098TuZyX7hxVKS2/GGMsm4BEuqkbg0W6hTahwBMl1HWp0FFnYA+4KJO
9j70eFcht1YgpE5MfzBSp8GhhNgyMKu8JIjksYOFk8xjb0HlJFfpn0d1TO/ehNhaCJX98eMvnz68ei8DYpfvztbhMvFrFS0bZogM2RNquQcbIvM9HIQ8QHIOQIAh8gG1GAQuMuUZt8y0EG9iEx9rDxvrcXAsG9dm2bgvlm12eE3doyenKmNk2UpdI8tWlLvFskfGvAUPoZO6NS+hU+hQQPRRndbd/VNKzH4Q8ZvoN3lCdxolxnks3kx4VT+aTyPxERcu1rjwxcD9jRnc+JEHfKDY2JyY5uGe2E0cK/AJd4Bt+fKiPC5IostsQELMLM4s7Nu9YOPGehwcNrZqY2Nr+Ni4weRUZYzYWKlrxMaKct+m+a9OBlxmxmUy5Wk+7UfQvAXXoZO6NfehU+hQQPPq3t3V9ACL9fQ4qtPsgwgx9wSoX/109un89NUH4/TV5buP789PDc3RBsaJcScc/dCjry4uPp6ev7o8//hhoEAcT5BzuMcEUtviDvQswKlMiPZcDqgjl2wsTF2fmJ6D77cvLQPxxnocHBA3awNxc/CpYE0mpypjBOJKXSMQV5Qriv3EsiyaJcZPy6uYRScy3JQmwVKGn7jBplNmhPnJSWHEZOCpxO1Hn8Z0sA21d+tZdFK35l10Cu09Tp8VswfM8tkj/Ek1eYCcPGA9eUAQgRLVr9JE6nTJiOErIP06FZA3uzb+WHy6vutLOcS7OrHbGCgWRxPsHG7CCPNc7EMSAj+0fYBDDwMWmAHgvmdDzDAN3V422TTW4+CwuF0bi9vDD4o3mJyqjBGLK3WNWFxRbrXqy+XkkIiCceMsFh+KIZX9YLyWN/GlIZ8Geby82r3xMV4k0+Qqd32fStswQvItOBid1K05GZ1Cew/JA2USAV5OIuCt51AeVF+ndierKSRvuVy/9VGdrhkENEc1oLn63Cw/EwFViH7W6SEJ78/OX384/5vx6ew/zs/+PEzIDsmE0B0/BqGjM2YdTDFFDkZ2L6i8ptRhYPD2Orejcw0c1zYtS6L6fo6xqCHy0Hq2m9PA3fwcKIxRftMAtLH9lz46uBSe3RH+69jV3QZVuGNSQh0KPIdDgAkmgMndaTZlPnaZaYeW3ccAaKzHMMZCi0EVUjuoQvoKqlgd3pb1aPikyhiDKkpdY1BFUe52pmHAjTc8MS7TKE5OqnuyBG+ZGiybc3+RyTBKFVzxk9jnc2kZ5OdyoUE+K6bZGGDZgrPRSd2aw9EptA8HInR4BMYTWYVOQB/MQid7H3q8w4OGnxgk0MnoKVCgE78Pnd7dUcMtUUydqH5ppk6LvQ+ll/kqARf/JWAhoRO4hZzACjnJ6Pk6pF4BJ/mxPJt4UQCnozoOeW/C6i2Exz+dvT9/e/7xlwtjdZrwMCPkCE0oMnc7Qt5JqI3AY0gdSFwCEYIQ2xZyewu11RY+jPBKe13d4YV7GFsm7fvCvQ1CD61rO4yiOj4hgUNtwH3iA2wxAoiLfeCTIPQs12Uw7KXjG+sxjLHQYhTVrR1FdQcfRW3iYlUZYxRVqWuMoirKFcUur2X4NOQC2ZfR0J9uZEaNgIh3wqPrq9vOY5+JGSytwon40Dj7lc3mU347yvrdR1E0/c74xFk2bvvW1N6tB9JJ3ZoX0im0DzGXjm/0ag5AdSL6AaE66fvQ5Z2F2dqilzpR/VJMnRZ7H2aTEbKg8K1lwGxW+tY7EbT1bV9R5VvFZ4AXrvVWGC6RjlW0i/SrR3Xs9Rh5q2JqB7HXzJkgh+x2WK5DQo8RCblpM+D6rimmBAwAY9ACnHsYYur5PsF9EPrGehwcoXdqE3pn+HvNGkxOVcZI6JW6RkKvKFcR+g/8q3HKBCRJrpYVrWfxMvPTaF7kQ11eJzOWCXoejResb8Ob6KRuzaPoFDoImB7zr8AvZ8warFczpvhIzhjxplFwVKcnRtitLnifvxn4WjedOK5zsKDaswm3TA8C4nMx4G0XAmaZDPDQsSD0A9NyejnVuLEeBweqaW1QTYe/StZgcqoyRlCt1DWCakW51V6DKAj49LnxZhn7bFzM2oaj0EndmrPQKbQPKxtR5sWtjwxEbBs6LsSkt7WMzRJb6+W5wOuCIewbMZrmVg8EudE7qjOZGlCeIPIX+ZUoN/JQj/SmB84Da3Ae9blZflQGrKj1TH92hvpcwG5eomojkvy1K371S/w5Tr7Gw6RWkExM+3DXK5Bn2xblDnCwawNMbAt4zOUCO1uBh3zHplYv6xWN9Tg0amXButRKlhw4tWoyOVUZI7VS6hqplaJcUex8aqTRlMV+lBgBv5pGRrZYBpEhw6wsjgwWGK88ngacj7xrG15EJ3VrnkSn0N6vU0RTUM4UkM8UkM8UsJopgAWArWbKUZ0e2P1LGesA9RbA84/rg/C5cbqcLpYpM2I+NS74YsF9Ma4TQ6YNsTiO/GSgENueYGfHz7LrEGJbjh0wikPAbdsDmGITEO4gQDzL8gITce73chZaYz0ODmLXvgrG6u0qGNwZxG4wOVUZI8RW6hohtqJcUey789mMywn+Xb6F5zs+m/M08q+TlGdR9t3EkDlD4keel5sfm/QpSYokoXcsFdXcGFGsOat6hORb8Do6qVvzPDqFxqUQTddSl1CICbIty6R9DYGHhY4LIg8wsJUdzXdr3LaixWaOyoaCVNpQmTZ2XdhQAUQ0Z5Uf1ZmbO07WEKW0H7ImfdVfk2V8ZahXDV9HmbJv47mBHOwA5Mhz9gdJ15wJcQ+XrmGHhxRbAoC7kAPsEAdQ7CIQhsSjEHI/dHqha431GAZda28sNHSSG6ATxi7EyIK9HKvzgLRh9GaL5BvVJt9o+OS7galVZYzkW6lrJN+KcqsLyqcC893izEYSG2+T4PlqV86VAIWpGGbr4zV+isQbxHG0nI3segsgQSd1a0BBp9DIrjsBD7q6OwYQOrF7v6zJcgOpEmIgSOFVEqz2YJX2cX1EwmxtH4/qzLuGzHl4jPlJJPbWd7KKVW/f+rz6ZyXgxXHK/SQNToq/Xv7b/weirs3DHDIDAA==\"\n" + " }"; for (int i = 0; i < 10000; i++) { - String orcidId = String.format("0000-0000-0000-%s", Integer.toString(i)); + String orcidId = String.format("0000-0000-0000-%s", i); String url = String.format(BASE_CFG_URL, indexHost, indexName, indexType, orcidId); - String recordTs = String.format(recordTemplate, Integer.toString(i)); + String recordTs = String.format(recordTemplate, i); getResponse(url, recordTs); } - System.out.println(""); + System.out.println(); } private String getResponse(final String url, final 
String json) { diff --git a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java index d96955c4a..2b241ed5f 100644 --- a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java +++ b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/OrcidClientTest.java @@ -83,7 +83,7 @@ public class OrcidClientTest { } catch (Throwable e) { e.printStackTrace(); } - return new String(""); + return ""; } // @Test @@ -142,7 +142,7 @@ public class OrcidClientTest { } private void testDate(String value) throws ParseException { - System.out.println(value.toString()); + System.out.println(value); if (value.length() != 19) { value = value.substring(0, 19); } @@ -185,7 +185,7 @@ public class OrcidClientTest { br = new BufferedReader(new InputStreamReader(input)); // Read directly from tarInput String line; while ((line = br.readLine()) != null) { - String[] values = line.toString().split(","); + String[] values = line.split(","); List recordInfo = Arrays.asList(values); assertTrue(recordInfo.size() == 4); String orcid = recordInfo.get(0); @@ -260,7 +260,7 @@ public class OrcidClientTest { } catch (Throwable e) { e.printStackTrace(); } - return new String(""); + return ""; } @Test diff --git a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParserTest.java b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParserTest.java index 2fe00bd57..235db52d4 100644 --- a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParserTest.java +++ b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcid/xml/XMLRecordParserTest.java @@ -44,7 +44,7 @@ public class XMLRecordParserTest { XMLRecordParser p = new XMLRecordParser(); - AuthorData authorData = p.VTDParseAuthorData(xml.getBytes()); + AuthorData authorData = XMLRecordParser.VTDParseAuthorData(xml.getBytes()); assertNotNull(authorData); assertNotNull(authorData.getName()); System.out.println("name: " + authorData.getName()); @@ -60,7 +60,7 @@ public class XMLRecordParserTest { XMLRecordParser p = new XMLRecordParser(); - AuthorData authorData = p.VTDParseAuthorData(xml.getBytes()); + AuthorData authorData = XMLRecordParser.VTDParseAuthorData(xml.getBytes()); assertNotNull(authorData); assertNotNull(authorData.getErrorCode()); System.out.println("error: " + authorData.getErrorCode()); @@ -75,7 +75,7 @@ public class XMLRecordParserTest { XMLRecordParser p = new XMLRecordParser(); - WorkData workData = p.VTDParseWorkData(xml.getBytes()); + WorkData workData = XMLRecordParser.VTDParseWorkData(xml.getBytes()); assertNotNull(workData); assertNotNull(workData.getOid()); System.out.println("oid: " + workData.getOid()); diff --git a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcidnodoi/xml/OrcidNoDoiTest.java b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcidnodoi/xml/OrcidNoDoiTest.java index efe01522c..54c2d6217 100644 --- a/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcidnodoi/xml/OrcidNoDoiTest.java +++ b/dhp-workflows/dhp-doiboost/src/test/java/eu/dnetlib/doiboost/orcidnodoi/xml/OrcidNoDoiTest.java @@ -50,7 +50,7 @@ public class OrcidNoDoiTest { } WorkDetail workData = null; try { - workData = p.VTDParseWorkData(xml.getBytes()); + workData = XMLRecordParserNoDoi.VTDParseWorkData(xml.getBytes()); } catch (Exception e) { logger.error("parsing xml", e); } @@ 
-107,7 +107,7 @@ public class OrcidNoDoiTest { } WorkDetail workData = null; try { - workData = p.VTDParseWorkData(xml.getBytes()); + workData = XMLRecordParserNoDoi.VTDParseWorkData(xml.getBytes()); } catch (Exception e) { logger.error("parsing xml", e); } @@ -138,7 +138,7 @@ public class OrcidNoDoiTest { } WorkDetail workData = null; try { - workData = p.VTDParseWorkData(xml.getBytes()); + workData = XMLRecordParserNoDoi.VTDParseWorkData(xml.getBytes()); } catch (Exception e) { logger.error("parsing xml", e); } @@ -181,7 +181,7 @@ public class OrcidNoDoiTest { } WorkDetail workData = null; try { - workData = p.VTDParseWorkData(xml.getBytes()); + workData = XMLRecordParserNoDoi.VTDParseWorkData(xml.getBytes()); } catch (Exception e) { logger.error("parsing xml", e); } @@ -217,16 +217,16 @@ public class OrcidNoDoiTest { .stream() .filter(c -> !StringUtils.isBlank(c.getCreditName())) .forEach(c -> { - if (am.simpleMatch(c.getCreditName(), author.getName()) || - am.simpleMatch(c.getCreditName(), author.getSurname()) || - am.simpleMatchOnOtherNames(c.getCreditName(), author.getOtherNames())) { + if (AuthorMatcher.simpleMatch(c.getCreditName(), author.getName()) || + AuthorMatcher.simpleMatch(c.getCreditName(), author.getSurname()) || + AuthorMatcher.simpleMatchOnOtherNames(c.getCreditName(), author.getOtherNames())) { matchCounters.set(0, matchCounters.get(0) + 1); c.setSimpleMatch(true); } }); assertTrue(matchCounters.get(0) == 1); - am.updateAuthorsSimpleMatch(contributors, author); + AuthorMatcher.updateAuthorsSimpleMatch(contributors, author); assertTrue(contributors.get(0).getName().equals("Joe")); assertTrue(contributors.get(0).getSurname().equals("Dodge")); assertTrue(contributors.get(0).getCreditName().equals("Joe Dodge")); @@ -249,9 +249,9 @@ public class OrcidNoDoiTest { .stream() .filter(c -> !StringUtils.isBlank(c.getCreditName())) .forEach(c -> { - if (am.simpleMatch(c.getCreditName(), authorX.getName()) || - am.simpleMatch(c.getCreditName(), authorX.getSurname()) || - am.simpleMatchOnOtherNames(c.getCreditName(), author.getOtherNames())) { + if (AuthorMatcher.simpleMatch(c.getCreditName(), authorX.getName()) || + AuthorMatcher.simpleMatch(c.getCreditName(), authorX.getSurname()) || + AuthorMatcher.simpleMatchOnOtherNames(c.getCreditName(), author.getOtherNames())) { int currentCounter = matchCounters2.get(0); currentCounter += 1; matchCounters2.set(0, currentCounter); @@ -268,7 +268,7 @@ public class OrcidNoDoiTest { .filter(c -> c.isSimpleMatch()) .filter(c -> !StringUtils.isBlank(c.getCreditName())) .map(c -> { - c.setScore(am.bestMatch(authorX.getName(), authorX.getSurname(), c.getCreditName())); + c.setScore(AuthorMatcher.bestMatch(authorX.getName(), authorX.getSurname(), c.getCreditName())); return c; }) .filter(c -> c.getScore() >= AuthorMatcher.threshold) @@ -280,7 +280,7 @@ public class OrcidNoDoiTest { assertTrue(bestMatchContributor.getCreditName().equals("Abdel-Dayem Khai")); assertTrue(contributorList.get(0).isBestMatch()); assertTrue(!contributorList.get(1).isBestMatch()); - am.updateAuthorsSimilarityMatch(contributorList, authorX); + AuthorMatcher.updateAuthorsSimilarityMatch(contributorList, authorX); assertTrue(contributorList.get(0).getName().equals(nameA)); assertTrue(contributorList.get(0).getSurname().equals(surnameA)); assertTrue(contributorList.get(0).getCreditName().equals("Abdel-Dayem Khai")); @@ -310,7 +310,7 @@ public class OrcidNoDoiTest { } WorkDetail workData = null; try { - workData = p.VTDParseWorkData(xml.getBytes()); + workData = 
@@ -310,7 +310,7 @@ public class OrcidNoDoiTest {
 		}
 		WorkDetail workData = null;
 		try {
-			workData = p.VTDParseWorkData(xml.getBytes());
+			workData = XMLRecordParserNoDoi.VTDParseWorkData(xml.getBytes());
 		} catch (Exception e) {
 			logger.error("parsing xml", e);
 		}
@@ -331,8 +331,8 @@ public class OrcidNoDoiTest {
 		author.setName("Joe");
 		author.setSurname("Dodge");
 		author.setOid("0000-1111-2222-3333");
-		String otherName1 = new String("Joe Dr. Dodge");
-		String otherName2 = new String("XY");
+		String otherName1 = "Joe Dr. Dodge";
+		String otherName2 = "XY";
 		List others = Lists.newArrayList();
 		others.add(otherName1);
 		others.add(otherName2);

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/SparkBulkTagJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/SparkBulkTagJob.java
index 4800def0a..1a0afb981 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/SparkBulkTagJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/SparkBulkTagJob.java
@@ -123,7 +123,7 @@ public class SparkBulkTagJob {

 	// TODO remove this hack as soon as the values fixed by this method will be provided as NON null
 	private static MapFunction patchResult() {
-		return (MapFunction) r -> {
+		return r -> {
 			if (r.getDataInfo().getDeletedbyinference() == null) {
 				r.getDataInfo().setDeletedbyinference(false);
 			}

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/community/CommunityConfigurationFactory.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/community/CommunityConfigurationFactory.java
index 9a24c5280..749ed292f 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/community/CommunityConfigurationFactory.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/community/CommunityConfigurationFactory.java
@@ -29,7 +29,7 @@ public class CommunityConfigurationFactory {

 	private static final Log log = LogFactory.getLog(CommunityConfigurationFactory.class);

-	private static VerbResolver resolver = VerbResolverFactory.newInstance();
+	private static final VerbResolver resolver = VerbResolverFactory.newInstance();

 	public static CommunityConfiguration newInstance(final String xml) throws DocumentException {
@@ -51,7 +51,7 @@ public class CommunityConfigurationFactory {
 		}

 		log.info(String.format("loaded %s community configuration profiles", communities.size()));
-		log.debug(String.format("loaded community configuration:\n%s", communities.toString()));
+		log.debug(String.format("loaded community configuration:\n%s", communities));

 		return new CommunityConfiguration(communities);
 	}

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/criteria/VerbResolver.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/criteria/VerbResolver.java
index f54a1ceba..54176efb6 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/criteria/VerbResolver.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/bulktag/criteria/VerbResolver.java
@@ -40,7 +40,7 @@ public class VerbResolver implements Serializable {
 								.getParameterValues()
 								.get(0)
 								.getValue(),
-							value -> (Class) ((ClassInfo) value).loadClass()));
+							value -> (Class) value.loadClass()));
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
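The `(MapFunction) r -> {...}` to `r -> {...}` change in SparkBulkTagJob above is the redundant-lambda-cast cleanup: when the enclosing method's declared return type already fixes the lambda's target type, the leading cast adds nothing and type inference supplies it. A sketch under hypothetical names:

import java.util.function.Function;

public class LambdaCastSketch {
	// Before-style: return (Function<String, Integer>) s -> s.length();
	// After-style: the cast is dropped; the return type drives inference.
	static Function<String, Integer> lengthFn() {
		return s -> s.length();
	}

	public static void main(String[] args) {
		System.out.println(lengthFn().apply("orcid")); // 5
	}
}
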
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/countrypropagation/SparkCountryPropagationJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/countrypropagation/SparkCountryPropagationJob.java
index 974b3a3b1..97e0a33e1 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/countrypropagation/SparkCountryPropagationJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/countrypropagation/SparkCountryPropagationJob.java
@@ -111,7 +111,7 @@ public class SparkCountryPropagationJob {
 	}

 	private static MapFunction, R> getCountryMergeFn() {
-		return (MapFunction, R>) t -> {
+		return t -> {
 			Optional.ofNullable(t._2()).ifPresent(r -> {
 				t._1().getCountry().addAll(merge(t._1().getCountry(), r.getCountrySet()));
 			});

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/orcidtoresultfromsemrel/SparkOrcidToResultFromSemRelJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/orcidtoresultfromsemrel/SparkOrcidToResultFromSemRelJob.java
index 9f08fe580..e90e43a20 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/orcidtoresultfromsemrel/SparkOrcidToResultFromSemRelJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/orcidtoresultfromsemrel/SparkOrcidToResultFromSemRelJob.java
@@ -105,7 +105,7 @@ public class SparkOrcidToResultFromSemRelJob {
 	}

 	private static MapFunction, R> authorEnrichFn() {
-		return (MapFunction, R>) value -> {
+		return value -> {
 			R ret = value._1();
 			Optional rol = Optional.ofNullable(value._2());
 			if (rol.isPresent()) {
@@ -202,8 +202,8 @@ public class SparkOrcidToResultFromSemRelJob {
 			return false;
 		}
 		for (StructuredProperty pid : pids.get()) {
-			if (ModelConstants.ORCID_PENDING.equals(pid.getQualifier().getClassid().toLowerCase()) ||
-				ModelConstants.ORCID.equals(pid.getQualifier().getClassid().toLowerCase())) {
+			if (ModelConstants.ORCID_PENDING.equalsIgnoreCase(pid.getQualifier().getClassid()) ||
+				ModelConstants.ORCID.equalsIgnoreCase(pid.getQualifier().getClassid())) {
 				return true;
 			}
 		}

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/projecttoresult/SparkResultToProjectThroughSemRelJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/projecttoresult/SparkResultToProjectThroughSemRelJob.java
index 0791fd68c..c57abb451 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/projecttoresult/SparkResultToProjectThroughSemRelJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/projecttoresult/SparkResultToProjectThroughSemRelJob.java
@@ -94,7 +94,7 @@ public class SparkResultToProjectThroughSemRelJob {
 	}

 	private static FlatMapFunction, Relation> mapRelationRn() {
-		return (FlatMapFunction, Relation>) value -> {
+		return value -> {
 			List new_relations = new ArrayList<>();
 			ResultProjectSet potential_update = value._1();
 			Optional already_linked = Optional.ofNullable(value._2());

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/PrepareResultCommunitySet.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/PrepareResultCommunitySet.java
index bea0a3e54..a5f84cd2f 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/PrepareResultCommunitySet.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/PrepareResultCommunitySet.java
@@ -117,7 +117,7 @@ public class PrepareResultCommunitySet {

 	private static MapFunction mapResultCommunityFn(
 		OrganizationMap organizationMap) {
-		return (MapFunction) value -> {
+		return value -> {
 			String rId = value.getResultId();
 			Optional> orgs = Optional.ofNullable(value.getMerges());
 			String oTarget = value.getOrgId();
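The SparkOrcidToResultFromSemRelJob hunk above also rewrites `X.equals(y.toLowerCase())` as `X.equalsIgnoreCase(y)`. Besides skipping a temporary lower-cased copy, this avoids the default-locale behaviour of `String.toLowerCase()` (the Turkish dotted/dotless 'i' being the classic trap). A sketch with hypothetical values:

public class CaseCompareSketch {
	public static void main(String[] args) {
		String classid = "ORCID";
		// Before: allocates a lowered copy and depends on the default locale.
		boolean before = "orcid".equals(classid.toLowerCase());
		// After: compares in place, character by character.
		boolean after = "orcid".equalsIgnoreCase(classid);
		System.out.println(before + " " + after); // true true for ASCII inputs
	}
}
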
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/SparkResultToCommunityFromOrganizationJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/SparkResultToCommunityFromOrganizationJob.java
index 60ad43859..7201a30f6 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/SparkResultToCommunityFromOrganizationJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromorganization/SparkResultToCommunityFromOrganizationJob.java
@@ -98,7 +98,7 @@ public class SparkResultToCommunityFromOrganizationJob {
 	}

 	private static MapFunction, R> resultCommunityFn() {
-		return (MapFunction, R>) value -> {
+		return value -> {
 			R ret = value._1();
 			Optional rcl = Optional.ofNullable(value._2());
 			if (rcl.isPresent()) {

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromsemrel/SparkResultToCommunityThroughSemRelJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromsemrel/SparkResultToCommunityThroughSemRelJob.java
index 5ac117693..4cb241ef2 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromsemrel/SparkResultToCommunityThroughSemRelJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttocommunityfromsemrel/SparkResultToCommunityThroughSemRelJob.java
@@ -101,7 +101,7 @@ public class SparkResultToCommunityThroughSemRelJob {
 	}

 	private static MapFunction, R> contextUpdaterFn() {
-		return (MapFunction, R>) value -> {
+		return value -> {
 			R ret = value._1();
 			Optional rcl = Optional.ofNullable(value._2());
 			if (rcl.isPresent()) {

diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/SparkResultToOrganizationFromIstRepoJob.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/SparkResultToOrganizationFromIstRepoJob.java
index ff34bd42a..01d7b85e4 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/SparkResultToOrganizationFromIstRepoJob.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/SparkResultToOrganizationFromIstRepoJob.java
@@ -124,7 +124,7 @@ public class SparkResultToOrganizationFromIstRepoJob {
 	}

 	private static FlatMapFunction, Relation> createRelationFn() {
-		return (FlatMapFunction, Relation>) value -> {
+		return value -> {
 			List new_relations = new ArrayList<>();
 			ResultOrganizationSet potential_update = value._1();
 			Optional already_linked = Optional.ofNullable(value._2());

diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/dump/Constants.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/dump/Constants.java
index 86a275ae2..00f0dd01c 100644
--- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/dump/Constants.java
+++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/dump/Constants.java
@@ -14,7 +14,7 @@ public class Constants {
 	public static final String HARVESTED = "Harvested";
 	public static final String DEFAULT_TRUST = "0.9";
-	public static final String USER_CLAIM = "Linked by user";;
+	public static final String USER_CLAIM = "Linked by user";

 	public static String COAR_ACCESS_RIGHT_SCHEMA = "http://vocabularies.coar-repositories.org/documentation/access_rights/";
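The Constants hunk above removes a doubled semicolon: at class-body level, the extra `;` parses as an empty declaration, so it is harmless at runtime but flagged by most linters. A hypothetical sketch:

public class EmptyStatementSketch {
	public static final String USER_CLAIM = "Linked by user"; // one ';' is enough; ";;" adds an empty declaration

	public static void main(String[] args) {
		System.out.println(USER_CLAIM);
	}
}
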
@@ -44,7 +44,7 @@ public class Constants {
 	public enum DUMPTYPE {
 		COMPLETE("complete"), COMMUNITY("community"), FUNDER("funder");

-		private String type;
+		private final String type;

 		DUMPTYPE(String type) {
 			this.type = type;

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/GraphHiveImporterJobTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/GraphHiveImporterJobTest.java
index e95174670..32f6e7abc 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/GraphHiveImporterJobTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/GraphHiveImporterJobTest.java
@@ -51,7 +51,7 @@ public class GraphHiveImporterJobTest {
 		conf
 			.set(
 				"javax.jdo.option.ConnectionURL",
-				String.format(JDBC_DERBY_TEMPLATE, workingDir.resolve("warehouse").toString()));
+				String.format(JDBC_DERBY_TEMPLATE, workingDir.resolve("warehouse")));

 		spark = SparkSession
 			.builder()

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
index 271ae8fc1..7c69c9635 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
@@ -40,7 +40,7 @@ public class DumpJobTest {

 	private static final Logger log = LoggerFactory.getLogger(DumpJobTest.class);

-	private static CommunityMap map = new CommunityMap();
+	private static final CommunityMap map = new CommunityMap();

 	static {
 		map.put("egi", "EGI Federation");

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/PrepareResultProjectJobTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/PrepareResultProjectJobTest.java
index 0e8908418..d5a9ba8dd 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/PrepareResultProjectJobTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/PrepareResultProjectJobTest.java
@@ -37,7 +37,7 @@ public class PrepareResultProjectJobTest {
 	private static final Logger log = LoggerFactory
 		.getLogger(eu.dnetlib.dhp.oa.graph.dump.PrepareResultProjectJobTest.class);

-	private static HashMap map = new HashMap<>();
+	private static final HashMap map = new HashMap<>();

 	@BeforeAll
 	public static void beforeAll() throws IOException {

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/UpdateProjectInfoTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/UpdateProjectInfoTest.java
index bd191c847..20a46cee0 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/UpdateProjectInfoTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/UpdateProjectInfoTest.java
@@ -36,7 +36,7 @@ public class UpdateProjectInfoTest {

 	private static final Logger log = LoggerFactory.getLogger(eu.dnetlib.dhp.oa.graph.dump.UpdateProjectInfoTest.class);

-	private static HashMap map = new HashMap<>();
+	private static final HashMap map = new HashMap<>();

 	@BeforeAll
 	public static void beforeAll() throws IOException {
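The DUMPTYPE hunk and the field changes above all apply the "field may be final" inspection: a field assigned exactly once (in the constructor or initializer) and never rebound can be declared final, which documents the invariant and lets the compiler enforce it. A sketch modelled on the enum case, with illustrative names:

public enum DumpTypeSketch {
	COMPLETE("complete"), COMMUNITY("community"), FUNDER("funder");

	private final String type; // was: private String type;

	DumpTypeSketch(String type) {
		this.type = type; // the only assignment, so final is safe
	}

	public String getType() {
		return type;
	}
}
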
diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpOrganizationProjectDatasourceTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpOrganizationProjectDatasourceTest.java
index 62c7bf93c..69100a114 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpOrganizationProjectDatasourceTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpOrganizationProjectDatasourceTest.java
@@ -37,7 +37,7 @@ public class DumpOrganizationProjectDatasourceTest {
 	private static final Logger log = LoggerFactory
 		.getLogger(DumpOrganizationProjectDatasourceTest.class);

-	private static HashMap map = new HashMap<>();
+	private static final HashMap map = new HashMap<>();

 	@BeforeAll
 	public static void beforeAll() throws IOException {

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpRelationTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpRelationTest.java
index fa3c2c131..d80eb3ec6 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpRelationTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/DumpRelationTest.java
@@ -36,7 +36,7 @@ public class DumpRelationTest {
 	private static final Logger log = LoggerFactory
 		.getLogger(DumpRelationTest.class);

-	private static HashMap map = new HashMap<>();
+	private static final HashMap map = new HashMap<>();

 	@BeforeAll
 	public static void beforeAll() throws IOException {

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/RelationFromOrganizationTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/RelationFromOrganizationTest.java
index b92d19d46..ea2dc73ca 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/RelationFromOrganizationTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/complete/RelationFromOrganizationTest.java
@@ -33,7 +33,7 @@ public class RelationFromOrganizationTest {
 	private static final Logger log = LoggerFactory
 		.getLogger(RelationFromOrganizationTest.class);

-	private static HashMap map = new HashMap<>();
+	private static final HashMap map = new HashMap<>();

 	String organizationCommunityMap = "{\"20|grid________::afaa39865943381c51f76c08725ffa75\":[\"mes\",\"euromarine\"], \"20|corda__h2020::e8dbe14cca9bf6fce09d468872f813f8\":[\"mes\",\"euromarine\"], \"20|snsf________::9b253f265e3bef5cae6d881fdf61aceb\":[\"mes\",\"euromarine\"],\"20|rcuk________::e054eea0a47665af8c3656b5785ccf76\":[\"mes\",\"euromarine\"],\"20|corda__h2020::edc18d67c9b11fb616ca9f6e1db1b151\":[\"mes\",\"euromarine\"],\"20|rcuk________::d5736d9da90521ddcdc7828a05a85e9a\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f5d418d3aa1cf817ddefcc3fdc039f27\":[\"mes\",\"euromarine\"],\"20|snsf________::8fa091f8f25a846779acb4ea97b50aef\":[\"mes\",\"euromarine\"],\"20|corda__h2020::81e020977211c2c40fae2e1a50bffd71\":[\"mes\",\"euromarine\"],\"20|corda_______::81e020977211c2c40fae2e1a50bffd71\":[\"mes\",\"euromarine\"],\"20|snsf________::31d0a100e54e3cdb3c6f52d91e638c78\":[\"mes\",\"euromarine\"],\"20|corda__h2020::ea379ef91b8cc86f9ac5edc4169292db\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f75ee2ee48e5cb0ec8c8d30aaa8fef70\":[\"mes\",\"euromarine\"],\"20|rcuk________::e16010089551a1a9182a94604fc0ea59\":[\"mes\",\"euromarine\"],\"20|corda__h2020::38531a2cce7c5c347ffc439b07c1f43b\":[\"mes\",\"euromarine\"],\"20|corda_______::38531a2cce7c5c347ffc439b07c1f43b\":[\"mes\",\"euromarine\"],\"20|grid________::b2cbbf5eadbbf87d534b022bad3191d7\":[\"mes\",\"euromarine\"],\"20|snsf________::74730ef1439d7f7636a8be58a6b471b8\":[\"mes\",\"euromarine\"],\"20|nsf_________::ad72e19043a5a467e35f9b444d11563e\":[\"mes\",\"euromarine\"],\"20|rcuk________::0fc3e92500290902a2d38ec2445e74c3\":[\"mes\",\"euromarine\"],\"20|grid________::ad2c29905da0eb3c06b3fa80cacd89ea\":[\"mes\",\"euromarine\"],\"20|corda__h2020::30b53e4d63d3724f00acb9cbaca40860\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f60f84bee14ad93f0db0e49af1d5c317\":[\"mes\",\"euromarine\"], \"20|corda__h2020::7bf251ac3765b5e89d82270a1763d09f\":[\"mes\",\"euromarine\"], \"20|corda__h2020::65531bd11be9935948c7f2f4db1c1832\":[\"mes\",\"euromarine\"], \"20|corda__h2020::e0e98f86bbc76638bbb72a8fe2302946\":[\"mes\",\"euromarine\"], \"20|snsf________::3eb43582ac27601459a8d8b3e195724b\":[\"mes\",\"euromarine\"], \"20|corda__h2020::af2481dab65d06c8ea0ae02b5517b9b6\":[\"mes\",\"euromarine\"], \"20|corda__h2020::c19d05cfde69a50d3ebc89bd0ee49929\":[\"mes\",\"euromarine\"], \"20|corda__h2020::af0bfd9fc09f80d9488f56d71a9832f0\":[\"mes\",\"euromarine\"], \"20|rcuk________::f33c02afb0dc66c49d0ed97ca5dd5cb0\":[\"beopen\"], " +
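Note on the many `private static final HashMap map` changes in these test classes: final freezes the reference, not the contents, so the maps can still be populated in a `@BeforeAll` initializer (or a static block) while reassignment becomes a compile error. A sketch with hypothetical names, reusing the "egi" fixture seen above:

import java.util.HashMap;
import java.util.Map;

public class CommunityMapSketch {
	private static final Map<String, String> MAP = new HashMap<>();

	static {
		MAP.put("egi", "EGI Federation"); // mutation is still allowed
		// MAP = new HashMap<>();         // reassignment would no longer compile
	}

	public static void main(String[] args) {
		System.out.println(MAP.get("egi"));
	}
}
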
\"20|snsf________::9b253f265e3bef5cae6d881fdf61aceb\":[\"mes\",\"euromarine\"],\"20|rcuk________::e054eea0a47665af8c3656b5785ccf76\":[\"mes\",\"euromarine\"],\"20|corda__h2020::edc18d67c9b11fb616ca9f6e1db1b151\":[\"mes\",\"euromarine\"],\"20|rcuk________::d5736d9da90521ddcdc7828a05a85e9a\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f5d418d3aa1cf817ddefcc3fdc039f27\":[\"mes\",\"euromarine\"],\"20|snsf________::8fa091f8f25a846779acb4ea97b50aef\":[\"mes\",\"euromarine\"],\"20|corda__h2020::81e020977211c2c40fae2e1a50bffd71\":[\"mes\",\"euromarine\"],\"20|corda_______::81e020977211c2c40fae2e1a50bffd71\":[\"mes\",\"euromarine\"],\"20|snsf________::31d0a100e54e3cdb3c6f52d91e638c78\":[\"mes\",\"euromarine\"],\"20|corda__h2020::ea379ef91b8cc86f9ac5edc4169292db\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f75ee2ee48e5cb0ec8c8d30aaa8fef70\":[\"mes\",\"euromarine\"],\"20|rcuk________::e16010089551a1a9182a94604fc0ea59\":[\"mes\",\"euromarine\"],\"20|corda__h2020::38531a2cce7c5c347ffc439b07c1f43b\":[\"mes\",\"euromarine\"],\"20|corda_______::38531a2cce7c5c347ffc439b07c1f43b\":[\"mes\",\"euromarine\"],\"20|grid________::b2cbbf5eadbbf87d534b022bad3191d7\":[\"mes\",\"euromarine\"],\"20|snsf________::74730ef1439d7f7636a8be58a6b471b8\":[\"mes\",\"euromarine\"],\"20|nsf_________::ad72e19043a5a467e35f9b444d11563e\":[\"mes\",\"euromarine\"],\"20|rcuk________::0fc3e92500290902a2d38ec2445e74c3\":[\"mes\",\"euromarine\"],\"20|grid________::ad2c29905da0eb3c06b3fa80cacd89ea\":[\"mes\",\"euromarine\"],\"20|corda__h2020::30b53e4d63d3724f00acb9cbaca40860\":[\"mes\",\"euromarine\"],\"20|corda__h2020::f60f84bee14ad93f0db0e49af1d5c317\":[\"mes\",\"euromarine\"], \"20|corda__h2020::7bf251ac3765b5e89d82270a1763d09f\":[\"mes\",\"euromarine\"], \"20|corda__h2020::65531bd11be9935948c7f2f4db1c1832\":[\"mes\",\"euromarine\"], \"20|corda__h2020::e0e98f86bbc76638bbb72a8fe2302946\":[\"mes\",\"euromarine\"], \"20|snsf________::3eb43582ac27601459a8d8b3e195724b\":[\"mes\",\"euromarine\"], \"20|corda__h2020::af2481dab65d06c8ea0ae02b5517b9b6\":[\"mes\",\"euromarine\"], \"20|corda__h2020::c19d05cfde69a50d3ebc89bd0ee49929\":[\"mes\",\"euromarine\"], \"20|corda__h2020::af0bfd9fc09f80d9488f56d71a9832f0\":[\"mes\",\"euromarine\"], \"20|rcuk________::f33c02afb0dc66c49d0ed97ca5dd5cb0\":[\"beopen\"], " + diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/funderresult/ResultLinkedToProjectTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/funderresult/ResultLinkedToProjectTest.java index dbe3db6fe..6c5ebbab3 100644 --- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/funderresult/ResultLinkedToProjectTest.java +++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/funderresult/ResultLinkedToProjectTest.java @@ -36,7 +36,7 @@ public class ResultLinkedToProjectTest { private static final Logger log = LoggerFactory .getLogger(eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class); - private static HashMap map = new HashMap<>(); + private static final HashMap map = new HashMap<>(); @BeforeAll public static void beforeAll() throws IOException { diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java index 52a909f76..7e911f2b7 100644 --- 
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java
@@ -347,7 +347,7 @@ public class MigrateDbEntitiesApplicationTest {
 	}

 	private List getValueAsList(final String name, final List fields) {
-		return (List) getValueAs(name, fields);
+		return getValueAs(name, fields);
 	}
 }

diff --git a/dhp-workflows/dhp-graph-provision-scholexplorer/src/main/java/eu/dnetlib/dhp/provision/update/CrossrefClient.java b/dhp-workflows/dhp-graph-provision-scholexplorer/src/main/java/eu/dnetlib/dhp/provision/update/CrossrefClient.java
index fac1da253..9ace7b37a 100644
--- a/dhp-workflows/dhp-graph-provision-scholexplorer/src/main/java/eu/dnetlib/dhp/provision/update/CrossrefClient.java
+++ b/dhp-workflows/dhp-graph-provision-scholexplorer/src/main/java/eu/dnetlib/dhp/provision/update/CrossrefClient.java
@@ -61,9 +61,8 @@ public class CrossrefClient {
 				int size = decompresser.inflate(buffer);
 				bos.write(buffer, 0, size);
 			}
-			byte[] unzippeddata = bos.toByteArray();
 			decompresser.end();
-			return new String(unzippeddata);
+			return bos.toString();
 		} catch (Throwable e) {
 			throw new RuntimeException("Wrong record:" + blob, e);
 		}

diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
index c87f0cd94..7d53d3554 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
@@ -188,7 +188,7 @@ public class PrepareRelationsJob {
 	public static class RelationAggregator extends Aggregator {

-		private int maxRelations;
+		private final int maxRelations;

 		public RelationAggregator(int maxRelations) {
 			this.maxRelations = maxRelations;

diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/SolrAdminApplication.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/SolrAdminApplication.java
index 5fe452efe..410aff5ba 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/SolrAdminApplication.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/SolrAdminApplication.java
@@ -27,7 +27,7 @@ public class SolrAdminApplication implements Closeable {
 		DELETE_BY_QUERY, COMMIT
 	}

-	private CloudSolrClient solrClient;
+	private final CloudSolrClient solrClient;

 	public static void main(final String[] args) throws Exception {
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
@@ -90,7 +90,7 @@ public class SolrAdminApplication implements Closeable {
 			case COMMIT:
 				return solrClient.commit(collection);
 			default:
-				throw new IllegalArgumentException("action not managed: " + action.toString());
+				throw new IllegalArgumentException("action not managed: " + action);
 		}
 	}
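The CrossrefClient hunk above is behaviour-preserving: `ByteArrayOutputStream.toString()` is equivalent to `new String(bos.toByteArray())`, as both decode with the platform default charset, so the intermediate byte[] can simply be dropped. (An explicit charset would arguably be safer, but that would change behaviour, which a pure cleanup must not do.) A sketch with hypothetical names:

import java.io.ByteArrayOutputStream;

public class BosToStringSketch {
	public static void main(String[] args) {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		bos.write('o');
		bos.write('k');
		String viaBytes = new String(bos.toByteArray()); // before: extra array copy
		String direct = bos.toString();                  // after: same decoding, no copy
		System.out.println(viaBytes.equals(direct));     // true
	}
}
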
diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java
index 9ff387c8c..a321bdba9 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java
@@ -51,17 +51,17 @@ public class XmlIndexingJob {

 	protected static final String DATE_FORMAT = "yyyy-MM-dd'T'hh:mm:ss'Z'";

-	private String inputPath;
+	private final String inputPath;

-	private String format;
+	private final String format;

-	private int batchSize;
+	private final int batchSize;

-	private OutputFormat outputFormat;
+	private final OutputFormat outputFormat;

-	private String outputPath;
+	private final String outputPath;

-	private SparkSession spark;
+	private final SparkSession spark;

 	public static void main(String[] args) throws Exception {

diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/RelationPartitioner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/RelationPartitioner.java
index 7bd8b9217..1043f9085 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/RelationPartitioner.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/RelationPartitioner.java
@@ -35,8 +35,7 @@ public class RelationPartitioner extends Partitioner {
 	public boolean equals(Object obj) {
 		if (obj instanceof RelationPartitioner) {
 			RelationPartitioner p = (RelationPartitioner) obj;
-			if (p.numPartitions() == numPartitions())
-				return true;
+			return p.numPartitions() == numPartitions();
 		}
 		return false;
 	}

diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java
index 644ed98ab..526c1b5f4 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java
@@ -1091,7 +1091,7 @@ public class XmlRecordFactory implements Serializable {

 		if (StringUtils.isBlank(scheme)) {
 			throw new IllegalArgumentException(
-				String.format("missing scheme for: <%s - %s>", type.toString(), targetType));
+				String.format("missing scheme for: <%s - %s>", type, targetType));
 		}
 		final HashSet fields = Sets.newHashSet(mapFields(link, contexts));
 		if (rel.getValidated() == null)