From b695932ae44057d5399b1f8b71d50deeb9ba5d8d Mon Sep 17 00:00:00 2001
From: Claudio Atzori
Date: Thu, 20 May 2021 15:34:04 +0200
Subject: [PATCH] integrated pull#108

---
 .../project/utils/EXCELParser.java                 | 10 +++++---
 .../project/utils/ReadExcel.java                   |  7 +++---
 .../project/oozie_app/workflow.xml                 |  1 +
 .../dhp/actionmanager/project/parameters.json      |  5 ++++
 .../project/EXCELParserTest.java                   |  5 ++--
 .../PrepareResultInstRepoAssociation.java          | 23 ++++++++++++++++---
 .../input_prepareresultorg_parameters.json         |  7 +++++-
 .../oozie_app/workflow.xml                         |  1 +
 8 files changed, 47 insertions(+), 12 deletions(-)

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/EXCELParser.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/EXCELParser.java
index 0f83499e4..1a6ebb9e8 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/EXCELParser.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/EXCELParser.java
@@ -22,7 +22,7 @@ import org.apache.poi.xssf.usermodel.XSSFWorkbook;
  */
 public class EXCELParser {
 
-    public List<Object> parse(InputStream file, String classForName)
+    public List<Object> parse(InputStream file, String classForName, String sheetName)
         throws ClassNotFoundException, IOException, IllegalAccessException, InstantiationException,
         InvalidFormatException {
 
@@ -30,7 +30,11 @@ public class EXCELParser {
         OPCPackage pkg = OPCPackage.open(file);
         XSSFWorkbook wb = new XSSFWorkbook(pkg);
 
-        XSSFSheet sheet = wb.getSheet("cordisref-H2020topics");
+        XSSFSheet sheet = wb.getSheet(sheetName);
+
+        if (sheet == null) {
+            throw new RuntimeException("Sheet name " + sheetName + " not present in current file");
+        }
 
         List<Object> ret = new ArrayList<>();
 
@@ -49,7 +53,7 @@ public class EXCELParser {
                 headers.add(dataFormatter.formatCellValue(cell));
             }
         } else {
-            Class<?> clazz = Class.forName("eu.dnetlib.dhp.actionmanager.project.utils.EXCELTopic");
+            Class<?> clazz = Class.forName(classForName);
             final Object cc = clazz.newInstance();
 
             for (int i = 0; i < headers.size(); i++) {
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
index f05ed9c2c..5ce0a681c 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java
@@ -41,19 +41,20 @@ public class ReadExcel implements Closeable {
         final String hdfsPath = parser.get("hdfsPath");
         final String hdfsNameNode = parser.get("hdfsNameNode");
         final String classForName = parser.get("classForName");
+        final String sheetName = parser.get("sheetName");
 
         try (final ReadExcel readExcel = new ReadExcel(hdfsPath, hdfsNameNode, fileURL)) {
 
             log.info("Getting Excel file...");
-            readExcel.execute(classForName);
+            readExcel.execute(classForName, sheetName);
 
         }
     }
 
-    public void execute(final String classForName) throws Exception {
+    public void execute(final String classForName, final String sheetName) throws Exception {
         EXCELParser excelParser = new EXCELParser();
         excelParser
-            .parse(excelFile, classForName)
+            .parse(excelFile, classForName, sheetName)
             .stream()
             .forEach(p -> write(p));
 
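For context, the two file diffs above make EXCELParser take the sheet name from the caller instead of the hard-coded "cordisref-H2020topics" sheet, and ReadExcel forwards the new sheetName argument to it. A minimal sketch of how the reworked parse signature is meant to be invoked follows; the local file path and the "Topics" sheet name are hypothetical, and it assumes the EXCELTopic bean and the Apache POI dependencies used by EXCELParser are on the classpath.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.util.List;

    import eu.dnetlib.dhp.actionmanager.project.utils.EXCELParser;

    public class ParseTopicsSketch {
        public static void main(String[] args) throws Exception {
            // hypothetical local copy of the CORDIS topics spreadsheet
            try (InputStream is = new FileInputStream("/tmp/cordisref-H2020topics.xlsx")) {
                List<Object> rows = new EXCELParser()
                    .parse(is, "eu.dnetlib.dhp.actionmanager.project.utils.EXCELTopic", "Topics");
                System.out.println("parsed rows: " + rows.size());
            }
        }
    }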
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml
index c710c8b55..8ce581885 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml
@@ -65,6 +65,7 @@
             <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
             <arg>--fileURL</arg><arg>${topicFileURL}</arg>
             <arg>--hdfsPath</arg><arg>${workingDir}/topic</arg>
+            <arg>--sheetName</arg><arg>${sheetName}</arg>
             <arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.utils.EXCELTopic</arg>
         </java>
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json
index dd3de70f6..b6c9c94b9 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json
@@ -23,6 +23,11 @@
   "paramLongName" : "classForName",
   "paramDescription" : "the name of the class to deserialize the csv to",
   "paramRequired" : true
+}, {
+  "paramName": "sn",
+  "paramLongName" : "sheetName",
+  "paramDescription" : "the name of the sheet in case the file is excel",
+  "paramRequired" : false
 }
 
 ]
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
index f0557ec4e..1601d9b3e 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/EXCELParserTest.java
@@ -35,8 +35,9 @@ public class EXCELParserTest {
 
         EXCELParser excelParser = new EXCELParser();
 
-        List<Object> pl = excelParser
-            .parse(httpConnector.getInputSourceAsStream(URL), "eu.dnetlib.dhp.actionmanager.project.utils.ExcelTopic");
+        final String classForName = "eu.dnetlib.dhp.actionmanager.project.utils.EXCELTopic";
+        final String sheetName = "Topics";
+        List<Object> pl = excelParser.parse(httpConnector.getInputSourceAsStream(URL), classForName, sheetName);
 
         Assertions.assertEquals(3837, pl.size());
 
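Note that sheetName is declared optional ("paramRequired": false) in parameters.json, while EXCELParser now fails fast when the requested sheet cannot be found. A defensive fallback on the caller's side could look like the sketch below; this is not part of the patch, and the get() stub merely stands in for the argument parser, which resolves optional parameters that were not supplied to null.

    import java.util.Optional;

    public class SheetNameFallbackSketch {

        // stand-in for the argument parser: optional parameters that are not passed resolve to null
        static String get(String name) {
            return null;
        }

        public static void main(String[] args) {
            // fall back to the previously hard-coded sheet rather than handing a null name to EXCELParser
            final String sheetName = Optional
                .ofNullable(get("sheetName"))
                .orElse("cordisref-H2020topics");
            System.out.println("using sheet: " + sheetName);
        }
    }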
diff --git a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/PrepareResultInstRepoAssociation.java b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/PrepareResultInstRepoAssociation.java
index fe5889c53..a41399627 100644
--- a/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/PrepareResultInstRepoAssociation.java
+++ b/dhp-workflows/dhp-enrichment/src/main/java/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/PrepareResultInstRepoAssociation.java
@@ -4,6 +4,11 @@ package eu.dnetlib.dhp.resulttoorganizationfrominstrepo;
 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.spark.SparkConf;
@@ -51,6 +56,11 @@ public class PrepareResultInstRepoAssociation {
         final String alreadyLinkedPath = parser.get("alreadyLinkedPath");
         log.info("alreadyLinkedPath {}: ", alreadyLinkedPath);
 
+        List<String> blacklist = Optional
+            .ofNullable(parser.get("blacklist"))
+            .map(v -> Arrays.asList(v.split(";")))
+            .orElse(new ArrayList<>());
+
         SparkConf conf = new SparkConf();
         conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
@@ -61,7 +71,7 @@ public class PrepareResultInstRepoAssociation {
                 readNeededResources(spark, inputPath);
 
                 removeOutputDir(spark, datasourceOrganizationPath);
-                prepareDatasourceOrganization(spark, datasourceOrganizationPath);
+                prepareDatasourceOrganization(spark, datasourceOrganizationPath, blacklist);
 
                 removeOutputDir(spark, alreadyLinkedPath);
                 prepareAlreadyLinkedAssociation(spark, alreadyLinkedPath);
@@ -80,7 +90,14 @@ public class PrepareResultInstRepoAssociation {
     }
 
     private static void prepareDatasourceOrganization(
-        SparkSession spark, String datasourceOrganizationPath) {
+        SparkSession spark, String datasourceOrganizationPath, List<String> blacklist) {
+        String blacklisted = "";
+        if (blacklist.size() > 0) {
+            blacklisted = " AND d.id != '" + blacklist.get(0) + "'";
+            for (int i = 1; i < blacklist.size(); i++) {
+                blacklisted += " AND d.id != '" + blacklist.get(i) + "'";
+            }
+        }
 
         String query = "SELECT source datasourceId, target organizationId "
             + "FROM ( SELECT id "
@@ -88,7 +105,7 @@ public class PrepareResultInstRepoAssociation {
             + "WHERE datasourcetype.classid = '"
             + INSTITUTIONAL_REPO_TYPE
             + "' "
-            + "AND datainfo.deletedbyinference = false ) d "
+            + "AND datainfo.deletedbyinference = false " + blacklisted + " ) d "
             + "JOIN ( SELECT source, target "
             + "FROM relation "
             + "WHERE lower(relclass) = '"
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/input_prepareresultorg_parameters.json b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/input_prepareresultorg_parameters.json
index c74496350..2f00bacae 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/input_prepareresultorg_parameters.json
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/input_prepareresultorg_parameters.json
@@ -28,5 +28,10 @@
     "paramLongName": "isSparkSessionManaged",
     "paramDescription": "the path where prepared info have been stored",
     "paramRequired": false
-  }
+  },{
+    "paramName": "bl",
+    "paramLongName": "blacklist",
+    "paramDescription": "institutional repositories that should not be considered for the propagation",
+    "paramRequired": false
+}
 ]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
index 2fe9a4256..edfff8817 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
@@ -141,6 +141,7 @@
             <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
             <arg>--datasourceOrganizationPath</arg><arg>${workingDir}/preparedInfo/datasourceOrganization</arg>
             <arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
+            <arg>--blacklist</arg><arg>${blacklist}</arg>
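The --blacklist value handled above is a single string of datasource identifiers separated by ';'; PrepareResultInstRepoAssociation splits it and appends one "AND d.id != '...'" predicate per entry to the datasource sub-select. The sketch below reproduces that clause building with a stream join instead of the explicit loop in the patch; the identifiers are made up for illustration.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class BlacklistFilterSketch {
        public static void main(String[] args) {
            // hypothetical value of --blacklist: institutional repository ids separated by ';'
            String blacklistArg = "10|re3data_____::1234;10|opendoar____::5678";

            List<String> blacklist = Arrays.asList(blacklistArg.split(";"));

            // equivalent of the loop in prepareDatasourceOrganization: one extra predicate per id
            String blacklisted = blacklist
                .stream()
                .map(id -> " AND d.id != '" + id + "'")
                .collect(Collectors.joining());

            System.out.println(blacklisted);
            // prints:  AND d.id != '10|re3data_____::1234' AND d.id != '10|opendoar____::5678'
        }
    }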